diff --git a/.github/PULL_REQUEST_TEMPLATE/fix-issue.md b/.github/PULL_REQUEST_TEMPLATE/fix-issue.md index 506996510c7e..f7cf22eb59c7 100644 --- a/.github/PULL_REQUEST_TEMPLATE/fix-issue.md +++ b/.github/PULL_REQUEST_TEMPLATE/fix-issue.md @@ -6,14 +6,14 @@ assignees: '' --- - ## Fix #XYZ - + diff --git a/.github/PULL_REQUEST_TEMPLATE/other-pr.md b/.github/PULL_REQUEST_TEMPLATE/other-pr.md index 4b69a80460af..fad49836df92 100644 --- a/.github/PULL_REQUEST_TEMPLATE/other-pr.md +++ b/.github/PULL_REQUEST_TEMPLATE/other-pr.md @@ -6,14 +6,14 @@ assignees: '' --- - ## Description - + diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d78148c8fb6f..370b66854051 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,13 +2,24 @@ name: Dotty on: push: - branches-ignore: - - 'language-reference-stable' + ## Be careful if you add or remove something here! Quoting from + ## : + ## + ## > If you define only tags/tags-ignore or only branches/branches-ignore, the + ## > workflow won't run for events affecting the undefined Git ref. If you + ## > define neither tags/tags-ignore or branches/branches-ignore, the workflow + ## > will run for events affecting either branches or tags. + ## + ## We want the CI to run on both branches and tags, so we should either have: + ## - both (tags or tags-ignore) and (branches or branches-ignore), + ## - or neither of them. + ## But it's important to not have only one or the other. 
tags: - - '**' - pull_request: + - '*' branches-ignore: - - 'language-reference-stable' + - 'gh-readonly-queue/**' + pull_request: + merge_group: schedule: - cron: '0 3 * * *' # Every day at 3 AM workflow_dispatch: @@ -91,6 +102,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -136,6 +148,7 @@ jobs: github.event_name == 'push' && github.ref != 'refs/heads/main' ) + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -212,6 +225,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -252,10 +266,12 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') && !contains(github.event.pull_request.body, '[skip community_build]') + && !contains(github.event.pull_request.body, '[skip community_build_a]') ) || ( github.event_name == 'workflow_dispatch' @@ -299,10 +315,12 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') && 
!contains(github.event.pull_request.body, '[skip community_build]') + && !contains(github.event.pull_request.body, '[skip community_build_b]') ) || ( github.event_name == 'workflow_dispatch' @@ -346,10 +364,12 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') && !contains(github.event.pull_request.body, '[skip community_build]') + && !contains(github.event.pull_request.body, '[skip community_build_c]') ) || ( github.event_name == 'workflow_dispatch' @@ -576,45 +596,6 @@ jobs: external_repository: lampepfl/dotty-website publish_branch: gh-pages - nightly_unmanaged_community_build: - # Self-hosted runner is used only for getting current build version - runs-on: [self-hosted, Linux] - container: - image: lampepfl/dotty:2021-03-22 - options: --cpu-shares 4096 - volumes: - - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [publish_nightly] - if: "(github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && github.repository == 'lampepfl/dotty'" - env: - NIGHTLYBUILD: yes - steps: - - name: Reset existing repo - run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - - - name: Checkout cleanup script - uses: actions/checkout@v3 - - - name: Cleanup - run: .github/workflows/cleanup.sh - - - name: Git Checkout - uses: actions/checkout@v3 - - - name: Add SBT proxy repositories - run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - - - name: Get version string for this build - run: | - ver=$(./project/scripts/sbt "print 
scala3-compiler-bootstrapped/version" | tail -n1) - echo "This build version: $ver" - echo "THISBUILD_VERSION=$ver" >> $GITHUB_ENV - # Steps above are copy-pasted from publish_nightly, needed only to resolve THISBUILD_VERSION - - name: Trigger unmanaged community build - run: .github/workflows/scripts/triggerUnmanagedCommunityBuild.sh "${{ secrets.BUILD_TOKEN }}" "$THISBUILD_VERSION" - publish_release: permissions: contents: write # for actions/create-release to create a release diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 3dd442bedcae..bb1aec1290c0 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -1,9 +1,12 @@ name: Scala CLA on: pull_request: + branches-ignore: + - 'language-reference-stable' push: branches: - - 'language-reference-backport' + - 'language-reference-stable' + merge_group: permissions: contents: write pull-requests: write diff --git a/.github/workflows/language-reference.yaml b/.github/workflows/language-reference.yaml index a661df490a3b..ec134ec35ffe 100644 --- a/.github/workflows/language-reference.yaml +++ b/.github/workflows/language-reference.yaml @@ -1,4 +1,4 @@ -name: language-reference-documentation +name: Language reference documentation on: push: @@ -14,14 +14,14 @@ permissions: jobs: build-and-push: + name: Build reference documentation and push it permissions: contents: write # for Git to git push - pull-requests: write # for peter-evans/create-pull-request to create a PR runs-on: ubuntu-latest steps: - name: Get current date id: date - run: echo "::set-output name=date::$(date +'%Y-%m-%d')" + run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT - name: Git Checkout uses: actions/checkout@v3 @@ -67,26 +67,22 @@ jobs: fi cd .. 
- - name: Merge changes to main - if: github.event_name == 'push' - run: | - cd dotty - git fetch origin main:main - git checkout main - git config user.name gh-actions - git config user.email actions@github.com - git merge language-reference-stable - cd .. - - - name: Create pull request with backport to main - if: github.event_name == 'push' - uses: peter-evans/create-pull-request@v4 + backport-to-main: + name: Create pull request with backport to main + permissions: + pull-requests: write # for repo-sync/pull-request to create a PR + runs-on: ubuntu-latest + if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' + steps: + - uses: actions/checkout@v3 + - uses: repo-sync/pull-request@v2 with: - path: dotty - branch: language-reference-backport - labels: area:documentation - title: Backport changes from stable documentation branch - body: This pull request is created automatically after push to stable documentation branch and backports the changes - reviewers: pikinier20,julienrf - assignees: pikinier20 + destination_branch: main + pr_label: area:documentation + pr_title: Sync with the stable documentation branch + pr_body: | + This pull request is syncing the main with changes from language-reference-stable. 
+ + It was created automatically after ${{ github.event.head_commit.id }} by @${{ github.event.head_commit.author.username }} + pr_assignee: ${{ github.event.head_commit.author.username }} diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 7415759078ac..ba4bae0456d0 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -13,9 +13,9 @@ jobs: options: --cpu-shares 4096 env: - SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} + SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} SDKMAN_TOKEN: ${{ secrets.SDKMAN_TOKEN }} - + steps: - name: Reset existing repo run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true diff --git a/.github/workflows/scripts/publish-sdkman.sh b/.github/workflows/scripts/publish-sdkman.sh index 07d35a72a65e..70987bff175b 100755 --- a/.github/workflows/scripts/publish-sdkman.sh +++ b/.github/workflows/scripts/publish-sdkman.sh @@ -9,11 +9,11 @@ set -u -# latest stable dotty version +# latest stable dotty version DOTTY_VERSION=$(curl -s https://api.github.com/repos/lampepfl/dotty/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') DOTTY_URL="https://github.com/lampepfl/dotty/releases/download/$DOTTY_VERSION/scala3-$DOTTY_VERSION.zip" -# checking if dotty version is available +# checking if dotty version is available if ! curl --output /dev/null --silent --head --fail "$DOTTY_URL"; then echo "URL doesn't exist: $DOTTY_URL" exit 1 diff --git a/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh b/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh deleted file mode 100755 index 694428e29bb5..000000000000 --- a/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash - -# This is script for triggering unamanged community build upon releasing nightly version. 
-# Script sends request to CB Jenkins instance to start the build for given released Scala version -# Prints url of created job to stdout -# -# Requirement: -# - the latest (nightly) version of scala should be published - -set -u - -if [ $# -ne 2 ]; then - echo "Wrong number of script arguments, expected , got $#: $@" - exit 1 -fi - -CB_ENDPOINT=https://scala3.westeurope.cloudapp.azure.com -CB_BUILD_TOKEN="$1" -SCALA_VERSION="$2" - -startRunResponse=$(curl "${CB_ENDPOINT}/job/runBuild/buildWithParameters?token=${CB_BUILD_TOKEN}&publishedScalaVersion=${SCALA_VERSION}" -v 2>&1) -echo "${startRunResponse}" -queueItem=$(echo "${startRunResponse}" | grep -oP "< Location: \K[\w\d:/.//]+") -# Wait until Jenkins does acknowledge the build (max 1 min ) -for i in {1..12}; do - buildUrl=$(curl -s "${queueItem}/api/json?tree=executable[url]" | jq .executable.url) - if [[ "null" == "${buildUrl}" ]]; then - echo "Waiting for build start..." - sleep 5 - else - echo "Created build url: ${buildUrl}" - exit 0 - fi -done - -# Set error if failed to resolve build url -exit 1 diff --git a/.gitignore b/.gitignore index eb9541428302..5240662741bb 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,9 @@ metals.sbt .idea_modules /.worksheet/ +# scala-cli +.scala-build + # Partest dotty.jar dotty-lib.jar @@ -90,3 +93,5 @@ compiler/test-coursier/run/*.jar # docs related contributors.js content-contributors.css +docs/_spec/_site/ +docs/_spec/.jekyll-metadata diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3267d1f02700..48206fd67b3c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,60 +1,5 @@ -# Dotty Developer Guidelines +# Contributing to Dotty -These guidelines are meant to be a living document that should be changed and adapted as needed. We encourage changes that make it easier to achieve our goals in an efficient way. - -## General Workflow - -This is the process for committing code to the Scala project. 
There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc. - -1. Make sure you have signed the [Scala CLA](https://www.lightbend.com/contribute/cla/scala), if not, sign it. -2. Before starting to work on a feature or a fix, it's good practice to ensure that: - 1. There is a ticket for your work in the project's [issue tracker](https://github.com/lampepfl/dotty/issues); - 2. The ticket has been discussed and prioritized by the team. -3. You should always perform your work in its own Git branch. The branch should be given a descriptive name that explains its intent. Some teams also like adding the ticket number and/or the [GitHub](http://github.com) user ID to the branch name, these details is up to each of the individual teams. (See below for more details on branch naming.) -4. When the feature or fix is completed you should open a [Pull Request](https://help.github.com/articles/using-pull-requests) on GitHub. -5. The Pull Request should be reviewed by other maintainers (as many as feasible/practical). Note that a reviewer can also be an outside contributor—members of Typesafe or VirtusLab and independent contributors are encouraged to participate in the review process. It is not a closed process. Please try to avoid conflict of interest—the spirit of the review process is to evenly distribute the understanding of our code base across its maintainers as well as to load balance quality assurance. Assigning a review to a "sure win" reviewer is not a good long-term solution. -6. After the review, you should resolve issues brought up by the reviewers as needed (pushing a new commit to address reviewers' comments), iterating until the reviewers give their thumbs up, the "LGTM" (acronym for "Looks Good To Me"). -7. Once the code has passed review the Pull Request can be merged into the distribution. 
- -## Pull Request Requirements - -In order for a Pull Request to be considered, it has to meet these requirements: - -1. Live up to the current code standard: - - Not violate [DRY](https://www.oreilly.com/library/view/97-things-every/9780596809515/ch30.html). - - [Boy Scout Rule](https://www.oreilly.com/library/view/97-things-every/9780596809515/ch08.html) should be applied. -2. Tests are of paramount importance. -3. The code must be well documented in the project's standard documentation format (see the ‘Documentation’ section below). - -If *all* of these requirements are not met then the code should **not** be merged into the distribution, and need not even be reviewed. - -## Documentation - -All contributed code should come accompanied by documentation. Pull requests containing undocumented code will not be accepted. Both user-facing Scaladoc comments, as well as committer-facing internal documentation (i.e. essential design decisions that other maintainers should know about should be placed inline with line comments `//`) should be accompanying all contributed code where possible. - - -## Work In Progress - -It is ok to work on a public feature branch in the GitHub repository. Something that can sometimes be useful for early feedback etc. If so, then it is preferable to name the branch accordingly. This can be done by either prefixing the name with ``wip-`` as in ‘Work In Progress’, or use hierarchical names like ``wip/..``, ``feature/..`` or ``topic/..``. Either way is fine as long as it is clear that it is work in progress and not ready for merge. This work can temporarily have a lower standard. However, to be merged into master it will have to go through the regular process outlined above, with Pull Request, review etc.. - -Also, to facilitate both well-formed commits and working together, the ``wip`` and ``feature``/``topic`` identifiers also have special meaning. 
Any branch labeled with ``wip`` is considered “git-unstable” and may be rebased and have its history rewritten. Any branch with ``feature``/``topic`` in the name is considered “stable” enough for others to depend on when a group is working on a feature. - -## Creating Commits And Writing Commit Messages - -Follow these guidelines when creating public commits and writing commit messages. - -1. If your work spans multiple local commits (for example; if you do safe point commits while working in a feature branch or work in a branch for long time doing merges/rebases etc.) then please do not commit it all but rewrite the history by squashing the commits into one large commit which is accompanied by a detailed commit message for (as discussed in the following sections). For more info, see the article: [Git Workflow](http://sandofsky.com/blog/git-workflow.html). Additionally, every commit should be able to be used in isolation—that is, each commit must build and pass all tests. -2. The first line should be a descriptive sentence about what the commit is doing. It should be possible to fully understand what the commit does by just reading this single line. It is **not ok** to only list the ticket number, type "minor fix" or similar. If the commit has a corresponding ticket, include a reference to the ticket number, prefixed with "Closes #", at the beginning of the first line followed by the title of the ticket, assuming that it aptly and concisely summarizes the commit in a single line. If the commit is a small fix, then you are done. If not, go to 3. -3. Following the single line description (ideally no more than 70 characters long) should be a blank line followed by an enumerated list with the details of the commit. -4. Add keywords for your commit (depending on the degree of automation we reach, the list may change over time): - * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone is encouraged to give feedback, however. 
(Remember that @-mentions will result in notifications also when pushing to a WIP branch, so please only include this in your commit message when you're ready for your pull request to be reviewed. Alternatively, you may request a review in the pull request's description.) - * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the ticket as fixed in the issue tracker (Assembla understands this). - * ``backport to _branch name_`` - if the fix needs to be cherry-picked to another branch (like 2.9.x, 2.10.x, etc) - -Example: - - Closes #2 Fixes the build - - - Details 1 - - Details 2 - - Details 3 +Firstly, thanks for being willing to contribute to Dotty! Head on over the +[Scala 3 Contributing +Guide](https://docs.scala-lang.org/scala3/guides/contribution/contribution-intro.html), which should have all the info you're looking for. diff --git a/MAINTENANCE.md b/MAINTENANCE.md index 7bde90839724..d1309a6b404d 100644 --- a/MAINTENANCE.md +++ b/MAINTENANCE.md @@ -1,9 +1,12 @@ # Issue Supervisor Role -This document formally defines the _Issue Supervisor_ role. This is a repository maintenance role that is assigned to core contributors on rotating basis. + +This document formally defines the _Issue Supervisor_ role. This is a repository maintenance role that is assigned to core contributors on a rotating basis. ## Responsibilities -Issue supervisor is responsible for: -- Health of the CI, nightly releases and benchmark infrastructure. + +The issue supervisor is responsible for: + +- The health of the CI, nightly releases and benchmark infrastructure. - PRs of external contributors: assigning someone to review, or handling themselves. - Triaging issues (especially new): - Each issue needs to be assigned an `itype` and 1 or more `area` labels. 
@@ -12,33 +15,39 @@ Issue supervisor is responsible for: - Modifying issue labels to best capture information about the issues - Attempting to reproduce the issue (or label “stat:cannot reproduce”) - Further minimizing the issue or asking the reporter of the issue to minimize it correctly (or label “stat:needs minimization”) + - Identifying which issues are of considerable importance and bringing them to the attention of the team during the Dotty meeting, where they can be filtered and added to the [Future Versions](https://github.com/lampepfl/dotty/milestone/46) milestone. Other core teammates are responsible for providing information to the issue supervisor in a timely manner when it is requested if they have that information. ## Assignment -Issue supervisor is appointed for 7 days and is responsible for what is specified in the “Responsibilities” section during those 7 days. Their assumption of the role starts from the Dotty Meeting on Monday and ends on the next Dotty Meeting on Monday. + +The issue supervisor is appointed for 7 days and is responsible for what is specified in the “Responsibilities” section during those 7 days. Their assumption of the role starts from the Dotty Meeting on Monday and ends on the next Dotty Meeting on Monday. During the Dotty Meeting, an issue supervisor is assigned for the current week and for the week after that. -Issue supervisor schedule is maintained in the [Issue Supervisor Statistics spreadsheet](https://docs.google.com/spreadsheets/d/19IAqNzHfJ9rsii3EsjIGwPz5BLTFJs_byGM3FprmX3E/edit?usp=sharing). So, someone who knows their availability several weeks ahead into the future can assign themselves to be an issue supervisor well ahead of time. +The issue supervisor schedule is maintained in the [Issue Supervisor Statistics spreadsheet](https://docs.google.com/spreadsheets/d/19IAqNzHfJ9rsii3EsjIGwPz5BLTFJs_byGM3FprmX3E/edit?usp=sharing). 
So, someone who knows their availability several weeks ahead into the future can assign themselves to be an issue supervisor well ahead of time. ## Prerequisites + An issue supervisor needs to have all the accesses and privileges required to get their job done. This might include: + - Admin rights in lampepfl/dotty repository - Admin rights in lampepfl/dotty-feature-requests repository -- Permissions to create new repositories in lampepfl organization (needed to fork repositories for the community build) +- Permission to create new repositories in lampepfl organization (needed to fork repositories for the community build) - Access to the LAMP slack to be able to ask for help with the infrastructure, triaging and such ## Procedures -To ensure proper health of the infrastructure, the supervisor regularly monitors its proper operation. If a malfunction is detected, the supervisor's job is to ensure that someone is working on it (or solve it on their own). + +To ensure the proper health of the infrastructure, the supervisor regularly monitors its proper operation. If a malfunction is detected, the supervisor's job is to ensure that someone is working on it (or solve it on their own). If it is unclear what area an issue belongs to, the supervisor asks for advice from other team members on Slack or GitHub. If, after asking for advice, it turns out that nobody in the team knows how to classify it, the issue must be classified with a “stat:needs triage” label. If it is unclear who should review an external PR, the supervisor asks for advice from the rest of the core team. If after asking for advice, it is still unclear who should do it, the reviewer for such a PR will be decided at the next Dotty meeting. -In general, if anything else is unclear for proper fulfillment of responsibilities, the supervisor must proactively seek advice from other team members on Slack or other channels. 
+In general, if anything else is unclear for the proper fulfillment of responsibilities, the supervisor must proactively seek advice from other team members on Slack or other channels. ## Reporting + At the end of their supervision period, the supervisor reports to the team during the Dotty meeting on the following points: - Whether there were any incidents with the CI, nightlies and benchmarks, how they were resolved and what steps were taken to prevent them from happening in the future. @@ -46,8 +55,10 @@ At the end of their supervision period, the supervisor reports to the team durin - How many new issues were opened during their supervision period? Were there any areas that got a lot of issues? How many regressions from a prior Scala 3 release were there? Which were designated for an MSc project or an Issue Spree? - If new labels were created or old ones were removed, or there is any other feedback on how to improve the issue supervision, mention that. - Unassigned PRs and issues that the team failed to classify: bring them one by one so that the team can make a decision on them. +- Issues of importance – candidates for the Future Versions milestone. + +## Maintenance List -# Maintenance List The following is the list of all the principal areas of the compiler and the core team members who are responsible for their maintenance: - Parser: @odersky @@ -73,5 +84,5 @@ The following is the list of all the principal areas of the compiler and the cor - Vulpix: @dwijnand, @prolativ - JVM backend: @Kordyjan, (@sjrd) - Derivation & Mirrors: @bishabosha, (@dwijnand) -- Linting (especially unused warnings) / Reporting UX : VirtusLab TBD? +- Linting (especially unused warnings) / Reporting UX: VirtusLab TBD? - Java-compat: @Kordyjan diff --git a/NOTICE.md b/NOTICE.md index 64ebae49efe5..f4d0e6ed2b5a 100644 --- a/NOTICE.md +++ b/NOTICE.md @@ -1,6 +1,6 @@ Dotty (https://dotty.epfl.ch) -Copyright 2012-2020 EPFL -Copyright 2012-2020 Lightbend, Inc. 
+Copyright 2012-2023 EPFL +Copyright 2012-2023 Lightbend, Inc. Licensed under the Apache License, Version 2.0 (the "License"): http://www.apache.org/licenses/LICENSE-2.0 diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala new file mode 100644 index 000000000000..fb2cedbb7d41 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala @@ -0,0 +1,49 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit +import java.util.concurrent.{Executors, ExecutorService} + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +class ContendedInitialization { + + @Param(Array("2000000", "5000000")) + var size: Int = _ + + @Param(Array("2", "4", "8")) + var nThreads: Int = _ + + var executor: ExecutorService = _ + + @Setup + def prepare: Unit = { + executor = Executors.newFixedThreadPool(nThreads) + } + + @TearDown + def cleanup: Unit = { + executor.shutdown() + executor = null + } + + @Benchmark + def measureContended(bh: Blackhole): Unit = { + val array = Array.fill(size)(new LazyHolder) + val task: Runnable = () => + for (elem <- array) bh.consume(elem.value) + + val futures = + for (_ <- 0 until nThreads) yield + executor.submit(task) + + futures.foreach(_.get()) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala new file mode 100644 index 000000000000..d413458d0049 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala @@ -0,0 +1,30 @@ +package 
dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccess { + + var holder: LazyHolder = _ + + @Setup + def prepare: Unit = { + holder = new LazyHolder + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala new file mode 100644 index 000000000000..8c75f6bb11a2 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala @@ -0,0 +1,30 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyAnyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessAny { + + var holder: LazyAnyHolder = _ + + @Setup + def prepare: Unit = { + holder = new LazyAnyHolder + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala new file mode 100644 index 000000000000..a9fecae6281e --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala @@ -0,0 +1,30 @@ +package 
dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyGenericHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessGeneric { + + var holder: LazyGenericHolder[String] = _ + + @Setup + def prepare: Unit = { + holder = new LazyGenericHolder[String]("foo") + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala new file mode 100644 index 000000000000..2a115ad63496 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala @@ -0,0 +1,30 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.Blackhole +import LazyVals.LazyIntHolder +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessInt { + + var holder: LazyIntHolder = _ + + @Setup + def prepare: Unit = { + holder = new LazyIntHolder + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala new file mode 100644 index 000000000000..4f3c75fd920b --- /dev/null +++ 
b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala @@ -0,0 +1,34 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessMultiple { + + var holders: Array[LazyHolder] = _ + + @Setup + def prepare: Unit = { + holders = Array.fill(100){ new LazyHolder } + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + var i = 0 + while(i < 100) { + val currentHolder = holders(i) + bh.consume(currentHolder) + bh.consume(currentHolder.value) + i = i + 1 + } + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala new file mode 100644 index 000000000000..e6c6cd5eb2e3 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala @@ -0,0 +1,30 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyStringHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessString { + + var holder: LazyStringHolder = _ + + @Setup + def prepare: Unit = { + holder = new LazyStringHolder + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedObject.scala 
b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedObject.scala new file mode 100644 index 000000000000..672cc4bf6544 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedObject.scala @@ -0,0 +1,22 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.Blackhole +import LazyVals.ObjectHolder +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedObject { + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(ObjectHolder) + bh.consume(ObjectHolder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala new file mode 100644 index 000000000000..26ebb7b9d356 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala @@ -0,0 +1,71 @@ +package dotty.tools.benchmarks.lazyvals +import java.util.concurrent.CountDownLatch +object LazyVals { + + trait Foo + class Bar1 extends Foo + class Bar2 extends Foo + class Bar3 extends Foo + class Bar4 extends Foo + class Bar5 extends Bar4 + + class LazyStringHolder { + + lazy val value: String = { + System.nanoTime() % 5 match { + case 0 => "abc" + case 1 => "def" + case 2 => "ghi" + case 3 => "jkl" + case 4 => "mno" + } + } + } + + class LazyHolder { + + lazy val value: List[Int] = { + System.nanoTime() % 5 match { + case 0 => 1 :: 2 :: Nil + case 1 => Nil + case 2 => 1 :: Nil + case 3 => Nil + case 4 => 1 :: 2 :: 3 :: Nil + } + } + } + + class LazyGenericHolder[A](v: => A) { + lazy val value: A = v + } + + class LazyAnyHolder { + lazy val value: Any = { + System.nanoTime() % 5 match { + case 0 => new Bar1 + case 1 => new Bar2 + case 2 => new Bar3 + case 3 => new 
Bar4 + case 4 => new Bar4 + } + } + } + + class LazyIntHolder { + lazy val value: Int = { + (System.nanoTime() % 1000).toInt + } + } + + object ObjectHolder { + lazy val value: String = { + System.nanoTime() % 5 match { + case 0 => "abc" + case 1 => "def" + case 2 => "ghi" + case 3 => "jkl" + case 4 => "mno" + } + } + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccess.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccess.scala new file mode 100644 index 000000000000..417d22f67d48 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccess.scala @@ -0,0 +1,25 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class UninitializedAccess { + + @Benchmark + def measureInitialized(bh: Blackhole) = { + var i = 0 + val holder = new LazyHolder + bh.consume(holder) + bh.consume(holder.value) + i = i + 1 + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccessMultiple.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccessMultiple.scala new file mode 100644 index 000000000000..133a0932bf51 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/UninitializedAccessMultiple.scala @@ -0,0 +1,27 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations._ +import LazyVals.LazyHolder +import org.openjdk.jmh.infra.Blackhole +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) 
+@State(Scope.Benchmark) +class UninitializedAccessMultiple { + + @Benchmark + def measureInitialized(bh: Blackhole) = { + var i = 0 + while(i < 100) { + val holder = new LazyHolder + bh.consume(holder) + bh.consume(holder.value) + i = i + 1 + } + } +} diff --git a/changelogs/3.2.1-RC1.md b/changelogs/3.2.1-RC1.md new file mode 100644 index 000000000000..217152fed9b2 --- /dev/null +++ b/changelogs/3.2.1-RC1.md @@ -0,0 +1,189 @@ +# Highlights of the release + +- Add experimental capture checking [#15877](https://github.com/lampepfl/dotty/pull/15877) +- Scaladoc: New UI design [#15697](https://github.com/lampepfl/dotty/pull/15697) + +# Other changes and fixes + +## Backend + +- Set ACC_FINAL access flag to final variable accessors [#15707](https://github.com/lampepfl/dotty/pull/15707) + +## Coverage + +- Avoid instrumentation of inline and erased definitions [#15504](https://github.com/lampepfl/dotty/pull/15504) + +## Erasure + +- Fix #15199: Exclude JavaDefined Modules from bridge generation. 
[#15499](https://github.com/lampepfl/dotty/pull/15499) + +## Extension Methods + +- Merge nested polytypes in more cases in resolveOverloaded [#15636](https://github.com/lampepfl/dotty/pull/15636) + +## GADTs + +- Fix GADT casting when typing if expressions [#15646](https://github.com/lampepfl/dotty/pull/15646) +- Use GADT constraints in maximiseType [#15544](https://github.com/lampepfl/dotty/pull/15544) +- Add the expected type to Poly's desugaring [#15570](https://github.com/lampepfl/dotty/pull/15570) +- Allow refineUsingParent to infer GADT bounds [#15706](https://github.com/lampepfl/dotty/pull/15706) +- Avoid leaking internal types in GadtConstraint.approximation [#15558](https://github.com/lampepfl/dotty/pull/15558) +- Improve GADT usage tracing for MatchType reduction [#15872](https://github.com/lampepfl/dotty/pull/15872) +- Add `gadtAddBound` to ExplainingTypeComparer tracing [#15819](https://github.com/lampepfl/dotty/pull/15819) + +## IDE Support + +- Recover from TypeErrors in isErroneous [#15442](https://github.com/lampepfl/dotty/pull/15442) +- Add a guard against empty ident [#15542](https://github.com/lampepfl/dotty/pull/15542) + +## Initialization + +- Fix #15465: Use resolveThis for outerSelect resolution [#15606](https://github.com/lampepfl/dotty/pull/15606) +- Fix #15459: Display uninitialized fields in promotion error [#15488](https://github.com/lampepfl/dotty/pull/15488) +- Fix #15363: Improve error messages for leaking of this [#15364](https://github.com/lampepfl/dotty/pull/15364) +- Make sure inner classes are checked first [#15527](https://github.com/lampepfl/dotty/pull/15527) +- Handle super accessors in initialization checker [#15703](https://github.com/lampepfl/dotty/pull/15703) + +## Inline + +- Check is inline unapply has leading implicits [#15583](https://github.com/lampepfl/dotty/pull/15583) +- Fix 
#15374: Make sure prefix of outer select has the correct class symbol [#15592](https://github.com/lampepfl/dotty/pull/15592) + +## Java Interoperability + +- Don't check import selectors for Java code [#15617](https://github.com/lampepfl/dotty/pull/15617) + +## JS Interoperability + +- Fix #15701: Implement js.dynamicImport for dynamic module loading. [#15720](https://github.com/lampepfl/dotty/pull/15720) +- Implement support for js.`new`.target. [#15734](https://github.com/lampepfl/dotty/pull/15734) +- Fix #14488: Scala.js: Add compiler support for scala.Enumeration. [#15770](https://github.com/lampepfl/dotty/pull/15770) + +## Match Types + +- Avoid null types when reducing match types [#15748](https://github.com/lampepfl/dotty/pull/15748) +- Avoid references to unbound parameters in applied type patterns [#15710](https://github.com/lampepfl/dotty/pull/15710) + +## Nullability + +- Fix checking ctx to carry correct modes [#15350](https://github.com/lampepfl/dotty/pull/15350) + +## Pattern Matching + +- Teach provablyDisjoint to handle FromJavaObject [#15769](https://github.com/lampepfl/dotty/pull/15769) +- Don't trust case class extractors with explicit type arguments [#15669](https://github.com/lampepfl/dotty/pull/15669) +- Space: Fix how sealed abstract classes decompose [#15553](https://github.com/lampepfl/dotty/pull/15553) +- Local classes are uncheckable (type tests) [#15134](https://github.com/lampepfl/dotty/pull/15134) +- Fix ValDef span assignment in PatternMatcher [#15783](https://github.com/lampepfl/dotty/pull/15783) +- Reject all explicitly written type references with bad bounds [#15577](https://github.com/lampepfl/dotty/pull/15577) + +## Pickling + +- Make simplify replace type parameters inside method types [#15430](https://github.com/lampepfl/dotty/pull/15430) + +## Quotes + +- Ignore types in macro runtime dependencies 
[#15529](https://github.com/lampepfl/dotty/pull/15529) + +## REPL + +- ReplDriver.run and :load take complete input [#15811](https://github.com/lampepfl/dotty/pull/15811) +- REPL goes through a phase [#15663](https://github.com/lampepfl/dotty/pull/15663) +- Avoid assertion failure for illegal trait inheritance [#15631](https://github.com/lampepfl/dotty/pull/15631) + +## Reporting + +- Underline assignment correctly in error message [#15584](https://github.com/lampepfl/dotty/pull/15584) +- Better error message for "implicit search must be more specific" [#15747](https://github.com/lampepfl/dotty/pull/15747) +- Provide better info on compiler crashes [#15890](https://github.com/lampepfl/dotty/pull/15890) + +## SemanticDB + +- Handle colons in file names when producing SemanticDB [#15863](https://github.com/lampepfl/dotty/pull/15863) + +## Standard Library + +- Fix `throwReturn` of `NonLocalReturns` to allow wider usage [#15495](https://github.com/lampepfl/dotty/pull/15495) + +## Tooling + +- Fix scala runner exit codes [#15604](https://github.com/lampepfl/dotty/pull/15604) +- Command line parser respects outer escaped quote [#15497](https://github.com/lampepfl/dotty/pull/15497) +- Added quotes to Scala home paths in scala and scalac scripts [#15824](https://github.com/lampepfl/dotty/pull/15824) +- Retain partial type params typying information on error [#15825](https://github.com/lampepfl/dotty/pull/15825) + +## Transform + +- Implementation restriction: No partial functions with CFT results [#15744](https://github.com/lampepfl/dotty/pull/15744) +- Fix treatment of parameter selections via this in constructors. 
[#15737](https://github.com/lampepfl/dotty/pull/15737) +- Fix expansion and unexpansion of mixin qualified names [#15712](https://github.com/lampepfl/dotty/pull/15712) +- Fix #15514 in Parser [#15524](https://github.com/lampepfl/dotty/pull/15524) +- Check type arguments for bad bounds [#15571](https://github.com/lampepfl/dotty/pull/15571) + +## Typer + +- Fix swapKey [#15894](https://github.com/lampepfl/dotty/pull/15894) +- Insert GADT casts for needy match types [#15851](https://github.com/lampepfl/dotty/pull/15851) +- Add missing dealias in isContextFunctionRef [#15742](https://github.com/lampepfl/dotty/pull/15742) +- Look for synthetic applies also under type applications [#15572](https://github.com/lampepfl/dotty/pull/15572) +- Optimize isSameType for invariant applied types with the same structure [#15556](https://github.com/lampepfl/dotty/pull/15556) +- Fix cycle detection for type aliases with wildcard arguments [#15508](https://github.com/lampepfl/dotty/pull/15508) +- Handle recursions in isFullyDefined [#15443](https://github.com/lampepfl/dotty/pull/15443) +- Do level checking on instantiation [#15746](https://github.com/lampepfl/dotty/pull/15746) +- Use fullyDefinedType for mirror synthesis [#15814](https://github.com/lampepfl/dotty/pull/15814) +- Instantiate more type variables to hard unions [#15632](https://github.com/lampepfl/dotty/pull/15632) +- Simplify level fixing scheme [#15936](https://github.com/lampepfl/dotty/pull/15936) +- Don't normalize in `AppliedType#superType` [#15453](https://github.com/lampepfl/dotty/pull/15453) +- Try promotion while widening arguments [#15467](https://github.com/lampepfl/dotty/pull/15467) +- Check import selectors at Typer [#15477](https://github.com/lampepfl/dotty/pull/15477) +- Turn some calls to `underlying` into `superType`. 
[#15455](https://github.com/lampepfl/dotty/pull/15455) +- Find more looping implicits [#15481](https://github.com/lampepfl/dotty/pull/15481) +- Fix `findFunctionType` for `OrTypes` [#15478](https://github.com/lampepfl/dotty/pull/15478) +- Fix looping implicits check [#15655](https://github.com/lampepfl/dotty/pull/15655) +- Try to avoid static symbols if leaving them would make a leak [#15548](https://github.com/lampepfl/dotty/pull/15548) +- Do not fold `IsConst` applied to dependent parameters [#15759](https://github.com/lampepfl/dotty/pull/15759) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.1.3..3.2.0` these are: + +``` + 204 Martin Odersky + 94 Filip Łukasik + 69 Fengyun Liu + 57 Filip Zybała + 31 Dale Wijnand + 14 Sébastien Doeraene + 10 Guillaume Raffin + 9 rochala + 8 Chris Kipp + 8 Paweł Marks + 8 Som Snytt + 7 Jędrzej Rochala + 7 Nicolas Stucki + 7 Yichen Xu + 6 Jamie Thompson + 6 Tom Grigg + 5 noti0na1 + 2 Arman Bilge + 2 Kacper Korban + 2 Matt Bovel + 2 Ondrej Lhotak + 2 Quentin Bernet + 2 Tomasz Godzik + 1 Adrien Piquerez + 1 Florian3k + 1 Gagandeep Kalra + 1 Jentsch + 1 Kieren Davies + 1 Michał Pałka + 1 Naveen + 1 Oron Port + 1 Rubin Simons + 1 Seth Tisue + 1 Wojciech Mazur + 1 esteban marin + 1 naveen +``` diff --git a/changelogs/3.2.1-RC2.md b/changelogs/3.2.1-RC2.md new file mode 100644 index 000000000000..5242481120d6 --- /dev/null +++ b/changelogs/3.2.1-RC2.md @@ -0,0 +1,18 @@ +# Backported fixes + +- Only look for synthetic applies under `TypeApply` with inferred arguments [#15970](https://github.com/lampepfl/dotty/pull/15970) +- Improvements for implicit searches with top-level type variables [#16001](https://github.com/lampepfl/dotty/pull/16001) +- Better types for class type parameters [#15951](https://github.com/lampepfl/dotty/pull/15951) +- Avoid adding `NoSymbol` to gadt 
constraints in `TypeOps.instantiateToSubType` [#15965](https://github.com/lampepfl/dotty/pull/15965) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.1-RC1..3.2.1-RC2` these are: + +``` + 7 Martin Odersky + 3 Kacper Korban + 2 Paweł Marks +``` diff --git a/changelogs/3.2.1-RC3.md b/changelogs/3.2.1-RC3.md new file mode 100644 index 000000000000..13a186432c97 --- /dev/null +++ b/changelogs/3.2.1-RC3.md @@ -0,0 +1,16 @@ +# Backported fixes + +- Fix tuple casting [#16113](https://github.com/lampepfl/dotty/pull/16113) +- Be even more careful when combining argument and info in `computeAsSeenFrom` [#16070](https://github.com/lampepfl/dotty/pull/16070) +- Be still more careful when computing denotations of class parameters [#16112](https://github.com/lampepfl/dotty/pull/16112) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.1-RC2..3.2.1-RC3` these are: + +``` + 4 Martin Odersky + 2 Paweł Marks +``` diff --git a/changelogs/3.2.1-RC4.md b/changelogs/3.2.1-RC4.md new file mode 100644 index 000000000000..89957185c508 --- /dev/null +++ b/changelogs/3.2.1-RC4.md @@ -0,0 +1,14 @@ +# Backported fixes + +- Update Scala 2 library dependency to 2.13.10 [#16074](https://github.com/lampepfl/dotty/pull/16074) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.1-RC3..3.2.1-RC4` these are: + +``` + 2 Paweł Marks + 1 Seth Tisue +``` diff --git a/changelogs/3.2.1.md b/changelogs/3.2.1.md new file mode 100644 index 000000000000..c0292ca6a977 --- /dev/null +++ b/changelogs/3.2.1.md @@ -0,0 +1,198 @@ +# Highlights of the release + +- Add experimental capture checking [#15877](https://github.com/lampepfl/dotty/pull/15877) +- Scaladoc: New UI design 
[#15697](https://github.com/lampepfl/dotty/pull/15697) + +# Other changes and fixes + +## Backend + +- Set ACC_FINAL access flag to final variable accessors [#15707](https://github.com/lampepfl/dotty/pull/15707) + +## Coverage + +- Avoid instrumentation of inline and erased definitions [#15504](https://github.com/lampepfl/dotty/pull/15504) + +## Erasure + +- Fix #15199: Exclude JavaDefined Modules from bridge generation. [#15499](https://github.com/lampepfl/dotty/pull/15499) + +## Extension Methods + +- Merge nested polytypes in more cases in resolveOverloaded [#15636](https://github.com/lampepfl/dotty/pull/15636) + +## GADTs + +- Fix GADT casting when typing if expressions [#15646](https://github.com/lampepfl/dotty/pull/15646) +- Use GADT constraints in maximiseType [#15544](https://github.com/lampepfl/dotty/pull/15544) +- Add the expected type to Poly's desugaring [#15570](https://github.com/lampepfl/dotty/pull/15570) +- Allow refineUsingParent to infer GADT bounds [#15706](https://github.com/lampepfl/dotty/pull/15706) +- Avoid leaking internal types in GadtConstraint.approximation [#15558](https://github.com/lampepfl/dotty/pull/15558) +- Improve GADT usage tracing for MatchType reduction [#15872](https://github.com/lampepfl/dotty/pull/15872) +- Add `gadtAddBound` to ExplainingTypeComparer tracing [#15819](https://github.com/lampepfl/dotty/pull/15819) +- Avoid adding `NoSymbol` to gadt constraints in `TypeOps.instantiateToSubType` [#15965](https://github.com/lampepfl/dotty/pull/15965) + +## IDE Support + +- Recover from TypeErrors in isErroneous [#15442](https://github.com/lampepfl/dotty/pull/15442) +- Add a guard against empty ident [#15542](https://github.com/lampepfl/dotty/pull/15542) + +## Initialization + +- Fix #15465: Use resolveThis for outerSelect resolution [#15606](https://github.com/lampepfl/dotty/pull/15606) +- Fix #15459: 
Display uninitialized fields in promotion error [#15488](https://github.com/lampepfl/dotty/pull/15488) +- Fix #15363: Improve error messages for leaking of this [#15364](https://github.com/lampepfl/dotty/pull/15364) +- Make sure inner classes are checked first [#15527](https://github.com/lampepfl/dotty/pull/15527) +- Handle super accessors in initialization checker [#15703](https://github.com/lampepfl/dotty/pull/15703) + +## Inline + +- Check is inline unapply has leading implicits [#15583](https://github.com/lampepfl/dotty/pull/15583) +- Fix #15374: Make sure prefix of outer select has the correct class symbol [#15592](https://github.com/lampepfl/dotty/pull/15592) + +## Java Interoperability + +- Don't check import selectors for Java code [#15617](https://github.com/lampepfl/dotty/pull/15617) + +## JS Interoperability + +- Fix #15701: Implement js.dynamicImport for dynamic module loading. [#15720](https://github.com/lampepfl/dotty/pull/15720) +- Implement support for js.`new`.target. [#15734](https://github.com/lampepfl/dotty/pull/15734) +- Fix #14488: Scala.js: Add compiler support for scala.Enumeration. 
[#15770](https://github.com/lampepfl/dotty/pull/15770) + +## Match Types + +- Avoid null types when reducing match types [#15748](https://github.com/lampepfl/dotty/pull/15748) +- Avoid references to unbound parameters in applied type patterns [#15710](https://github.com/lampepfl/dotty/pull/15710) + +## Nullability + +- Fix checking ctx to carry correct modes [#15350](https://github.com/lampepfl/dotty/pull/15350) + +## Pattern Matching + +- Teach provablyDisjoint to handle FromJavaObject [#15769](https://github.com/lampepfl/dotty/pull/15769) +- Don't trust case class extractors with explicit type arguments [#15669](https://github.com/lampepfl/dotty/pull/15669) +- Space: Fix how sealed abstract classes decompose [#15553](https://github.com/lampepfl/dotty/pull/15553) +- Local classes are uncheckable (type tests) [#15134](https://github.com/lampepfl/dotty/pull/15134) +- Fix ValDef span assignment in PatternMatcher [#15783](https://github.com/lampepfl/dotty/pull/15783) +- Reject all explicitly written type references with bad bounds [#15577](https://github.com/lampepfl/dotty/pull/15577) + +## Pickling + +- Make simplify replace type parameters inside method types [#15430](https://github.com/lampepfl/dotty/pull/15430) + +## Quotes + +- Ignore types in macro runtime dependencies [#15529](https://github.com/lampepfl/dotty/pull/15529) + +## REPL + +- ReplDriver.run and :load take complete input [#15811](https://github.com/lampepfl/dotty/pull/15811) +- REPL goes through a phase [#15663](https://github.com/lampepfl/dotty/pull/15663) +- Avoid assertion failure for illegal trait inheritance [#15631](https://github.com/lampepfl/dotty/pull/15631) + +## Reporting + +- Underline assignment correctly in error message [#15584](https://github.com/lampepfl/dotty/pull/15584) +- Better error message for "implicit search must be more specific" 
[#15747](https://github.com/lampepfl/dotty/pull/15747) +- Provide better info on compiler crashes [#15890](https://github.com/lampepfl/dotty/pull/15890) + +## SemanticDB + +- Handle colons in file names when producing SemanticDB [#15863](https://github.com/lampepfl/dotty/pull/15863) + +## Standard Library + +- Fix `throwReturn` of `NonLocalReturns` to allow wider usage [#15495](https://github.com/lampepfl/dotty/pull/15495) +- Update Scala 2 library dependency to 2.13.10 [#16074](https://github.com/lampepfl/dotty/pull/16074) + +## Tooling + +- Fix scala runner exit codes [#15604](https://github.com/lampepfl/dotty/pull/15604) +- Command line parser respects outer escaped quote [#15497](https://github.com/lampepfl/dotty/pull/15497) +- Added quotes to Scala home paths in scala and scalac scripts [#15824](https://github.com/lampepfl/dotty/pull/15824) +- Retain partial type params typying information on error [#15825](https://github.com/lampepfl/dotty/pull/15825) + +## Transform + +- Implementation restriction: No partial functions with CFT results [#15744](https://github.com/lampepfl/dotty/pull/15744) +- Fix treatment of parameter selections via this in constructors. 
[#15737](https://github.com/lampepfl/dotty/pull/15737) +- Fix expansion and unexpansion of mixin qualified names [#15712](https://github.com/lampepfl/dotty/pull/15712) +- Fix #15514 in Parser [#15524](https://github.com/lampepfl/dotty/pull/15524) +- Check type arguments for bad bounds [#15571](https://github.com/lampepfl/dotty/pull/15571) + +## Typer + +- Fix swapKey [#15894](https://github.com/lampepfl/dotty/pull/15894) +- Insert GADT casts for needy match types [#15851](https://github.com/lampepfl/dotty/pull/15851) +- Add missing dealias in isContextFunctionRef [#15742](https://github.com/lampepfl/dotty/pull/15742) +- Look for synthetic applies also under type applications [#15572](https://github.com/lampepfl/dotty/pull/15572) +- Optimize isSameType for invariant applied types with the same structure [#15556](https://github.com/lampepfl/dotty/pull/15556) +- Fix cycle detection for type aliases with wildcard arguments [#15508](https://github.com/lampepfl/dotty/pull/15508) +- Handle recursions in isFullyDefined [#15443](https://github.com/lampepfl/dotty/pull/15443) +- Do level checking on instantiation [#15746](https://github.com/lampepfl/dotty/pull/15746) +- Use fullyDefinedType for mirror synthesis [#15814](https://github.com/lampepfl/dotty/pull/15814) +- Instantiate more type variables to hard unions [#15632](https://github.com/lampepfl/dotty/pull/15632) +- Simplify level fixing scheme [#15936](https://github.com/lampepfl/dotty/pull/15936) +- Don't normalize in `AppliedType#superType` [#15453](https://github.com/lampepfl/dotty/pull/15453) +- Try promotion while widening arguments [#15467](https://github.com/lampepfl/dotty/pull/15467) +- Check import selectors at Typer [#15477](https://github.com/lampepfl/dotty/pull/15477) +- Turn some calls to `underlying` into `superType`. 
[#15455](https://github.com/lampepfl/dotty/pull/15455) +- Find more looping implicits [#15481](https://github.com/lampepfl/dotty/pull/15481) +- Fix `findFunctionType` for `OrTypes` [#15478](https://github.com/lampepfl/dotty/pull/15478) +- Fix looping implicits check [#15655](https://github.com/lampepfl/dotty/pull/15655) +- Try to avoid static symbols if leaving them would make a leak [#15548](https://github.com/lampepfl/dotty/pull/15548) +- Do not fold `IsConst` applied to dependent parameters [#15759](https://github.com/lampepfl/dotty/pull/15759) +- Only look for synthetic applies under `TypeApply` with inferred arguments [#15970](https://github.com/lampepfl/dotty/pull/15970) +- Improvements for implicit searches with top-level type variables [#16001](https://github.com/lampepfl/dotty/pull/16001) +- Better types for class type parameters [#15951](https://github.com/lampepfl/dotty/pull/15951) +- Fix tuple casting [#16113](https://github.com/lampepfl/dotty/pull/16113) +- Be even more careful when combining argument and info in `computeAsSeenFrom` [#16070](https://github.com/lampepfl/dotty/pull/16070) +- Be still more careful when computing denotations of class parameters [#16112](https://github.com/lampepfl/dotty/pull/16112) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.0..3.2.1` these are: + +``` + 215 Martin Odersky + 94 Filip Łukasik + 69 Fengyun Liu + 57 Filip Zybała + 31 Dale Wijnand + 16 Paweł Marks + 14 Sébastien Doeraene + 10 Guillaume Raffin + 9 rochala + 8 Chris Kipp + 8 Som Snytt + 7 Jędrzej Rochala + 7 Nicolas Stucki + 7 Yichen Xu + 6 Jamie Thompson + 6 Tom Grigg + 5 Kacper Korban + 5 noti0na1 + 2 Arman Bilge + 2 Matt Bovel + 2 Ondrej Lhotak + 2 Quentin Bernet + 2 Seth Tisue + 2 Tomasz Godzik + 1 Adrien Piquerez + 1 Florian3k + 1 Gagandeep Kalra + 1 Jentsch + 1 Kieren 
Davies + 1 Michał Pałka + 1 Naveen + 1 Oron Port + 1 Rubin Simons + 1 Wojciech Mazur + 1 esteban marin + 1 naveen + +``` diff --git a/changelogs/3.2.2-RC1.md b/changelogs/3.2.2-RC1.md new file mode 100644 index 000000000000..75738995c995 --- /dev/null +++ b/changelogs/3.2.2-RC1.md @@ -0,0 +1,151 @@ +# Highlights of the release + +- Make truncation by characters in repl configurable [#16167](https://github.com/lampepfl/dotty/pull/16167) +- New (experimental for now) lazy vals implementation [#15296](https://github.com/lampepfl/dotty/pull/15296) + +# Other changes and fixes + +## Coverage + +- Make coverage instrumentation more robust [#16235](https://github.com/lampepfl/dotty/pull/16235) + +## Default parameters + +- Resolve overloading: keep track of prefix and indices of all default getters [#16009](https://github.com/lampepfl/dotty/pull/16009) + +# Experimentals + +- Let accessors inherit @experimental annotations from accessed symbols [#16099](https://github.com/lampepfl/dotty/pull/16099) + +## Extension Methods + +- Fix i14451 [#16010](https://github.com/lampepfl/dotty/pull/16010) + +# GADTs + +- Add fixed GADT exhaustivity test cases [#16169](https://github.com/lampepfl/dotty/pull/16169) +- Eliminate class hierarchy in GadtConstraint [#16194](https://github.com/lampepfl/dotty/pull/16194) + +## Initialization + +- Code refactoring of initialization checker [#16066](https://github.com/lampepfl/dotty/pull/16066) + +## Inline + +- Refine checking for outer references [#16122](https://github.com/lampepfl/dotty/pull/16122) + +## Java interoperability + +- Parse native in Java bytecode as @native [#16232](https://github.com/lampepfl/dotty/pull/16232) + +## Opaque Types + +- Disallow opaque type aliases of context functions [#16041](https://github.com/lampepfl/dotty/pull/16041) + +## Parser + +- Align implementation with spec of soft modifiers 
[#15961](https://github.com/lampepfl/dotty/pull/15961) + +## Pattern Matching + +- Fix redundancy (unreachability) warning [#16179](https://github.com/lampepfl/dotty/pull/16179) + +# Polyfunctions + +- Avoid checking purity of Apply without symbols [#16221](https://github.com/lampepfl/dotty/pull/16221) + +## Reporting + +- Expose reason for unchecked warning [#16086](https://github.com/lampepfl/dotty/pull/16086) +- Refine AsSeenFrom approximation scheme [#15957](https://github.com/lampepfl/dotty/pull/15957) +- Better output under -Ydetailed-stats [#15950](https://github.com/lampepfl/dotty/pull/15950) +- Fix crash in -Vprofile code [#16007](https://github.com/lampepfl/dotty/pull/16007) +- Name stripped of module suffix must not be empty [#15597](https://github.com/lampepfl/dotty/pull/15597) +- Fix InlineMatch pos for `summonFrom` [#16025](https://github.com/lampepfl/dotty/pull/16025) + +## Rewrites + +- Rewrites are UTF-8 [#15622](https://github.com/lampepfl/dotty/pull/15622) + +## Scala-JS + +- Fix the detection of inferred types of `= js.native`. [#16184](https://github.com/lampepfl/dotty/pull/16184) +- Do not elide fields required for Scala.js interop semantics. 
[#16187](https://github.com/lampepfl/dotty/pull/16187) + +## Scaladoc + +- Update quick links for new ui [#15912](https://github.com/lampepfl/dotty/pull/15912) +- Avoid parsing code comment twice [#16154](https://github.com/lampepfl/dotty/pull/16154) +- Make footer text configurable [#16064](https://github.com/lampepfl/dotty/pull/16064) +- Add typography fallback fonts [#16063](https://github.com/lampepfl/dotty/pull/16063) + +## SemanticDB + +- Emit SymbolInformation and Occurrence for anonymous class [#15865](https://github.com/lampepfl/dotty/pull/15865) +- Fix regression in type apply synthetics [#16031](https://github.com/lampepfl/dotty/pull/16031) + +## Transform + +- Fix creating forwarders for simple alias givens [#16193](https://github.com/lampepfl/dotty/pull/16193) +- Only check for Scala2LocalSuffix in methods on Scala2x classes [#16014](https://github.com/lampepfl/dotty/pull/16014) +- Fix mapping TypeMaps over annotations [#15948](https://github.com/lampepfl/dotty/pull/15948) +- Peephole optimization to drop .apply from partially applied methods [#16022](https://github.com/lampepfl/dotty/pull/16022) + +## Typer + +- Fix approximateOr of (A & Double) | Null [#16241](https://github.com/lampepfl/dotty/pull/16241) +- Create boxed environments only for references and function values [#16136](https://github.com/lampepfl/dotty/pull/16136) +- Fix higher-order unification incorrectly substituting tparams [#16181](https://github.com/lampepfl/dotty/pull/16181) +- Handle copies of constrained type lambdas when determining an implicit scope [#16159](https://github.com/lampepfl/dotty/pull/16159) +- Create message for missing arguments [#16158](https://github.com/lampepfl/dotty/pull/16158) +- Make singleton types subtypes of their refined type [#16135](https://github.com/lampepfl/dotty/pull/16135) +- Disallow overriding val parameters 
[#16096](https://github.com/lampepfl/dotty/pull/16096) +- Avoid crash where creator proxies are referenced indirectly [#16098](https://github.com/lampepfl/dotty/pull/16098) +- Refine estimation of default arguments in overloading resolution [#15962](https://github.com/lampepfl/dotty/pull/15962) +- Harden canAssign [#15963](https://github.com/lampepfl/dotty/pull/15963) +- Fix isSubType for static objects filling in type projections [#15959](https://github.com/lampepfl/dotty/pull/15959) +- Fix expected type of TupleXXL unapply [#16248](https://github.com/lampepfl/dotty/pull/16248) +- Drop lazy recursive application in approximateParent [#16073](https://github.com/lampepfl/dotty/pull/16073) +- Type match with a match type when a match type is expected [#15599](https://github.com/lampepfl/dotty/pull/15599) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.1..3.2.2-RC1` these are: + +``` + 83 Martin Odersky + 24 Yichen Xu + 23 Quentin Bernet + 18 Michael Pollmeier + 17 Dale Wijnand + 8 Fengyun Liu + 8 Paweł Marks + 7 Guillaume Raffin + 6 Som Snytt + 6 Szymon Rodziewicz + 5 Kacper Korban + 4 Chris Kipp + 3 Matt Bovel + 3 Rikito Taniguchi + 2 Ben Hutchison + 2 Florian3k + 2 Guillaume Martres + 2 Jamie Thompson + 2 João Costa + 2 Julien Richard-Foy + 2 Lukas Rytz + 2 Sébastien Doeraene + 1 Anton Sviridov + 1 Gabriel Volpe + 1 Liang Yan + 1 Noah Rosamilia + 1 Ondra Pelech + 1 Seth Tisue + 1 Tomasz Godzik + 1 Vadim Chelyshov + 1 nmc.borst + 1 nmcb + 1 olsdavis +``` diff --git a/changelogs/3.2.2-RC2.md b/changelogs/3.2.2-RC2.md new file mode 100644 index 000000000000..bf171a7e987f --- /dev/null +++ b/changelogs/3.2.2-RC2.md @@ -0,0 +1,20 @@ +# Backported fixes + +- Compute completions at `typerPhase` [#16371](https://github.com/lampepfl/dotty/pull/16371) +- Only include accessible base classes in `orDominator` 
[#16477](https://github.com/lampepfl/dotty/pull/16477) +- Fix curried overloading resolution for polymorphic methods [#16485](https://github.com/lampepfl/dotty/pull/16485) +- Teach SpaceEngine that generic tuples are irrefutable [#16051](https://github.com/lampepfl/dotty/pull/16051) +- Fix exhaustivity warning [#16168](https://github.com/lampepfl/dotty/pull/16168) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.2-RC1..3.2.2-RC2` these are: + +``` + 3 Martin Odersky + 3 Paweł Marks + 2 Dale Wijnand + 1 Som Snytt +``` diff --git a/changelogs/3.2.2.md b/changelogs/3.2.2.md new file mode 100644 index 000000000000..610e6ccf53b0 --- /dev/null +++ b/changelogs/3.2.2.md @@ -0,0 +1,160 @@ +# Highlights of the release + +- Make truncation by characters in repl configurable [#16167](https://github.com/lampepfl/dotty/pull/16167) +- New (experimental for now) lazy vals implementation [#15296](https://github.com/lampepfl/dotty/pull/15296) + +# Other changes and fixes + +## Coverage + +- Make coverage instrumentation more robust [#16235](https://github.com/lampepfl/dotty/pull/16235) + +## Default parameters + +- Resolve overloading: keep track of prefix and indices of all default getters [#16009](https://github.com/lampepfl/dotty/pull/16009) + +## Experimentals + +- Let accessors inherit @experimental annotations from accessed symbols [#16099](https://github.com/lampepfl/dotty/pull/16099) + +## Extension Methods + +- Fix i14451 [#16010](https://github.com/lampepfl/dotty/pull/16010) + +## GADTs + +- Add fixed GADT exhaustivity test cases [#16169](https://github.com/lampepfl/dotty/pull/16169) + +- Eliminate class hierarchy in GadtConstraint [#16194](https://github.com/lampepfl/dotty/pull/16194) + +## Initialization + +- Code refactoring of initialization checker 
[#16066](https://github.com/lampepfl/dotty/pull/16066) + +## Inline + +- Refine checking for outer references [#16122](https://github.com/lampepfl/dotty/pull/16122) + +## Java interoperability + +- Parse native in Java bytecode as @native [#16232](https://github.com/lampepfl/dotty/pull/16232) + +## Opaque Types + +- Disallow opaque type aliases of context functions [#16041](https://github.com/lampepfl/dotty/pull/16041) + +## Overloading + +- Fix curried overloading resolution for polymorphic methods [#16485](https://github.com/lampepfl/dotty/pull/16485) + +## Parser + +- Align implementation with spec of soft modifiers [#15961](https://github.com/lampepfl/dotty/pull/15961) + +## Pattern Matching + +- Fix redundancy (unreachability) warning [#16179](https://github.com/lampepfl/dotty/pull/16179) +- Teach SpaceEngine that generic tuples are irrefutable [#16051](https://github.com/lampepfl/dotty/pull/16051) +- Fix exhaustivity warning [#16168](https://github.com/lampepfl/dotty/pull/16168) + +# Polyfunctions + +- Avoid checking purity of Apply without symbols [#16221](https://github.com/lampepfl/dotty/pull/16221) + +## Reporting + +- Expose reason for unchecked warning [#16086](https://github.com/lampepfl/dotty/pull/16086) +- Refine AsSeenFrom approximation scheme [#15957](https://github.com/lampepfl/dotty/pull/15957) +- Better output under -Ydetailed-stats [#15950](https://github.com/lampepfl/dotty/pull/15950) +- Fix crash in -Vprofile code [#16007](https://github.com/lampepfl/dotty/pull/16007) +- Name stripped of module suffix must not be empty [#15597](https://github.com/lampepfl/dotty/pull/15597) +- Fix InlineMatch pos for `summonFrom` [#16025](https://github.com/lampepfl/dotty/pull/16025) + +## Rewrites + +- Rewrites are UTF-8 [#15622](https://github.com/lampepfl/dotty/pull/15622) + +## Scala-JS + +- Fix the detection of inferred 
types of `= js.native`. [#16184](https://github.com/lampepfl/dotty/pull/16184) +- Do not elide fields required for Scala.js interop semantics. [#16187](https://github.com/lampepfl/dotty/pull/16187) + +## Scaladoc + +- Update quick links for new ui [#15912](https://github.com/lampepfl/dotty/pull/15912) +- Avoid parsing code comment twice [#16154](https://github.com/lampepfl/dotty/pull/16154) +- Make footer text configurable [#16064](https://github.com/lampepfl/dotty/pull/16064) +- Add typography fallback fonts [#16063](https://github.com/lampepfl/dotty/pull/16063) + +## SemanticDB + +- Emit SymbolInformation and Occurrence for anonymous class [#15865](https://github.com/lampepfl/dotty/pull/15865) +- Fix regression in type apply synthetics [#16031](https://github.com/lampepfl/dotty/pull/16031) + +## Transform + +- Fix creating forwarders for simple alias givens [#16193](https://github.com/lampepfl/dotty/pull/16193) +- Only check for Scala2LocalSuffix in methods on Scala2x classes [#16014](https://github.com/lampepfl/dotty/pull/16014) +- Fix mapping TypeMaps over annotations [#15948](https://github.com/lampepfl/dotty/pull/15948) +- Peephole optimization to drop .apply from partially applied methods [#16022](https://github.com/lampepfl/dotty/pull/16022) + +## Typer + +- Fix approximateOr of (A & Double) | Null [#16241](https://github.com/lampepfl/dotty/pull/16241) +- Create boxed environments only for references and function values [#16136](https://github.com/lampepfl/dotty/pull/16136) +- Fix higher-order unification incorrectly substituting tparams [#16181](https://github.com/lampepfl/dotty/pull/16181) +- Handle copies of constrained type lambdas when determining an implicit scope [#16159](https://github.com/lampepfl/dotty/pull/16159) +- Create message for missing arguments [#16158](https://github.com/lampepfl/dotty/pull/16158) +- 
Make singleton types subtypes of their refined type [#16135](https://github.com/lampepfl/dotty/pull/16135) +- Disallow overriding val parameters [#16096](https://github.com/lampepfl/dotty/pull/16096) +- Avoid crash where creator proxies are referenced indirectly [#16098](https://github.com/lampepfl/dotty/pull/16098) +- Refine estimation of default arguments in overloading resolution [#15962](https://github.com/lampepfl/dotty/pull/15962) +- Harden canAssign [#15963](https://github.com/lampepfl/dotty/pull/15963) +- Fix isSubType for static objects filling in type projections [#15959](https://github.com/lampepfl/dotty/pull/15959) +- Fix expected type of TupleXXL unapply [#16248](https://github.com/lampepfl/dotty/pull/16248) +- Drop lazy recursive application in approximateParent [#16073](https://github.com/lampepfl/dotty/pull/16073) +- Type match with a match type when a match type is expected [#15599](https://github.com/lampepfl/dotty/pull/15599) +- Compute completions at `typerPhase` [#16371](https://github.com/lampepfl/dotty/pull/16371) +- Only include accessible base classes in `orDominator` [#16477](https://github.com/lampepfl/dotty/pull/16477) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.1..3.2.2` these are: + +``` + 86 Martin Odersky + 24 Yichen Xu + 23 Quentin Bernet + 19 Dale Wijnand + 18 Michael Pollmeier + 13 Paweł Marks + 8 Fengyun Liu + 7 Guillaume Raffin + 7 Som Snytt + 6 Szymon Rodziewicz + 5 Kacper Korban + 4 Chris Kipp + 3 Matt Bovel + 3 Rikito Taniguchi + 2 Ben Hutchison + 2 Florian3k + 2 Guillaume Martres + 2 Jamie Thompson + 2 João Costa + 2 Julien Richard-Foy + 2 Lukas Rytz + 2 Sébastien Doeraene + 1 Anton Sviridov + 1 Gabriel Volpe + 1 Liang Yan + 1 Noah Rosamilia + 1 Ondra Pelech + 1 Seth Tisue + 1 Tomasz Godzik + 1 Vadim Chelyshov + 1 nmc.borst + 1 nmcb + 1 olsdavis 
+ +``` diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka index ed97fe5233cb..7f5115ebc9cd 160000 --- a/community-build/community-projects/akka +++ b/community-build/community-projects/akka @@ -1 +1 @@ -Subproject commit ed97fe5233cbda2da02abad50d48c310077b313c +Subproject commit 7f5115ebc9cde408433040f11834f5218b4a3357 diff --git a/community-build/community-projects/betterfiles b/community-build/community-projects/betterfiles index 0ab941360880..d098f2799092 160000 --- a/community-build/community-projects/betterfiles +++ b/community-build/community-projects/betterfiles @@ -1 +1 @@ -Subproject commit 0ab941360880095419183309b0b9b3363eb1ad00 +Subproject commit d098f279909246243643ba3b85f3520a24c377af diff --git a/community-build/community-projects/cats-effect-3 b/community-build/community-projects/cats-effect-3 index 3a32c0e5b7b6..1d425e6efdf8 160000 --- a/community-build/community-projects/cats-effect-3 +++ b/community-build/community-projects/cats-effect-3 @@ -1 +1 @@ -Subproject commit 3a32c0e5b7b61665e5bb94ccf0ed92beb66615dd +Subproject commit 1d425e6efdf8aee619a4a906e950473c51f78161 diff --git a/community-build/community-projects/cats-mtl b/community-build/community-projects/cats-mtl index 149f002c8774..0ab7aa1cc8a0 160000 --- a/community-build/community-projects/cats-mtl +++ b/community-build/community-projects/cats-mtl @@ -1 +1 @@ -Subproject commit 149f002c8774b61df87cb846455d94ae858b3b54 +Subproject commit 0ab7aa1cc8a087693b2b04c8a9cb63f69f4af54a diff --git a/community-build/community-projects/fs2 b/community-build/community-projects/fs2 index ac5275baf33b..6d7c6d6924cb 160000 --- a/community-build/community-projects/fs2 +++ b/community-build/community-projects/fs2 @@ -1 +1 @@ -Subproject commit ac5275baf33b03da0a461b5de735ee6a1f5a524e +Subproject commit 6d7c6d6924cb055028458ac8236622190acf66d1 diff --git a/community-build/community-projects/http4s b/community-build/community-projects/http4s index 
c3d46f561ed1..aa85f5f2e660 160000 --- a/community-build/community-projects/http4s +++ b/community-build/community-projects/http4s @@ -1 +1 @@ -Subproject commit c3d46f561ed1026ae54e1acbd5e4730f0498ea93 +Subproject commit aa85f5f2e660d1d4370d90316333718fd6517051 diff --git a/community-build/community-projects/play-json b/community-build/community-projects/play-json index ac0fcf24a176..b2b7f8b834a4 160000 --- a/community-build/community-projects/play-json +++ b/community-build/community-projects/play-json @@ -1 +1 @@ -Subproject commit ac0fcf24a17657a7ed61be34ed792d4fd8d05293 +Subproject commit b2b7f8b834a405ec6ba5455dc345b754fab21e8f diff --git a/community-build/community-projects/protoquill b/community-build/community-projects/protoquill index 16d26fcb3072..494c2ddc06e7 160000 --- a/community-build/community-projects/protoquill +++ b/community-build/community-projects/protoquill @@ -1 +1 @@ -Subproject commit 16d26fcb30720b9aa81d29f08b9da10916e269a2 +Subproject commit 494c2ddc06e71f1c7f13b382675525130feee9a0 diff --git a/community-build/community-projects/requests-scala b/community-build/community-projects/requests-scala index 6d4a223bc33d..8e4a40588491 160000 --- a/community-build/community-projects/requests-scala +++ b/community-build/community-projects/requests-scala @@ -1 +1 @@ -Subproject commit 6d4a223bc33def14ae9a4def24a3f5c258451e8e +Subproject commit 8e4a40588491608aa40099f79c881d54a5094e75 diff --git a/community-build/community-projects/scala-parallel-collections b/community-build/community-projects/scala-parallel-collections index a6bd648bb188..7d0e41ae4d09 160000 --- a/community-build/community-projects/scala-parallel-collections +++ b/community-build/community-projects/scala-parallel-collections @@ -1 +1 @@ -Subproject commit a6bd648bb188a65ab36be07e956e52fe25f64d67 +Subproject commit 7d0e41ae4d09e1ddf063651e377921ec493fc5bf diff --git a/community-build/community-projects/scalacheck b/community-build/community-projects/scalacheck index 
0ac8005753ab..fbfaabd7b628 160000 --- a/community-build/community-projects/scalacheck +++ b/community-build/community-projects/scalacheck @@ -1 +1 @@ -Subproject commit 0ac8005753ab98b6494fd631502201b97a103638 +Subproject commit fbfaabd7b628e9b0d8f78ed8a91a0672cf56ba15 diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz index ee85b0925809..6e7f3d9caf64 160000 --- a/community-build/community-projects/scalaz +++ b/community-build/community-projects/scalaz @@ -1 +1 @@ -Subproject commit ee85b0925809f6e04808a6124ae04dd89adba0d6 +Subproject commit 6e7f3d9caf64d8ad1c82804cf418882345f41930 diff --git a/community-build/community-projects/specs2 b/community-build/community-projects/specs2 index e1ae96e7a55f..789f23b75db1 160000 --- a/community-build/community-projects/specs2 +++ b/community-build/community-projects/specs2 @@ -1 +1 @@ -Subproject commit e1ae96e7a55fed2268f9ccd391687a5ac96ee4df +Subproject commit 789f23b75db1cf7961d04468b21a2cc0d7ba32d8 diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire index 6869620975fa..bc524eeea735 160000 --- a/community-build/community-projects/spire +++ b/community-build/community-projects/spire @@ -1 +1 @@ -Subproject commit 6869620975fa84dd1ef78c2711d6a4f8197060ae +Subproject commit bc524eeea735a3cf4d5108039f95950b024a14e4 diff --git a/community-build/community-projects/stdLib213 b/community-build/community-projects/stdLib213 index 2c742834cf16..1a2521996bad 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit 2c742834cf162ab89e914bbc0a8b975874a2c3c2 +Subproject commit 1a2521996badfe4cb3d9b8cdecefacb1251faeb9 diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index 52155189a31f..fe3f5cfed5a2 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ 
b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -140,7 +140,7 @@ final case class SbtCommunityProject( case Some(ivyHome) => List(s"-Dsbt.ivy.home=$ivyHome") case _ => Nil extraSbtArgs ++ sbtProps ++ List( - "-sbt-version", "1.7.1", + "-sbt-version", "1.8.0", "-Dsbt.supershell=false", s"-Ddotty.communitybuild.dir=$communitybuildDir", s"--addPluginSbtFile=$sbtPluginFilePath" diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index 3e2a8f1b0b60..e7b5a0dad1bf 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -4,7 +4,7 @@ package jvm import scala.language.unsafeNulls -import scala.annotation.switch +import scala.annotation.{switch, tailrec} import scala.collection.mutable.SortedMap import scala.tools.asm @@ -23,6 +23,7 @@ import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.util.Spans._ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Phases._ +import dotty.tools.dotc.core.Decorators.em import dotty.tools.dotc.report /* @@ -78,9 +79,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { tree match { case Assign(lhs @ DesugaredSelect(qual, _), rhs) => + val savedStackHeight = stackHeight val isStatic = lhs.symbol.isStaticMember - if (!isStatic) { genLoadQualifier(lhs) } + if (!isStatic) { + genLoadQualifier(lhs) + stackHeight += 1 + } genLoad(rhs, symInfoTK(lhs.symbol)) + stackHeight = savedStackHeight lineNumber(tree) // receiverClass is used in the bytecode to access the field. 
using sym.owner may lead to IllegalAccessError val receiverClass = qual.tpe.typeSymbol @@ -144,7 +150,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } genLoad(larg, resKind) + stackHeight += resKind.size genLoad(rarg, if (isShift) INT else resKind) + stackHeight -= resKind.size (code: @switch) match { case ADD => bc add resKind @@ -181,14 +189,19 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { if (isArrayGet(code)) { // load argument on stack assert(args.length == 1, s"Too many arguments for array get operation: $tree"); + stackHeight += 1 genLoad(args.head, INT) + stackHeight -= 1 generatedType = k.asArrayBType.componentType bc.aload(elementType) } else if (isArraySet(code)) { val List(a1, a2) = args + stackHeight += 1 genLoad(a1, INT) + stackHeight += 1 genLoad(a2) + stackHeight -= 2 generatedType = UNIT bc.astore(elementType) } else { @@ -222,7 +235,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val resKind = if (hasUnitBranch) UNIT else tpeTK(tree) val postIf = new asm.Label - genLoadTo(thenp, resKind, LoadDestination.Jump(postIf)) + genLoadTo(thenp, resKind, LoadDestination.Jump(postIf, stackHeight)) markProgramPoint(failure) genLoadTo(elsep, resKind, LoadDestination.FallThrough) markProgramPoint(postIf) @@ -481,7 +494,17 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { dest match case LoadDestination.FallThrough => () - case LoadDestination.Jump(label) => + case LoadDestination.Jump(label, targetStackHeight) => + if targetStackHeight < stackHeight then + val stackDiff = stackHeight - targetStackHeight + if expectedType == UNIT then + bc dropMany stackDiff + else + val loc = locals.makeTempLocal(expectedType) + bc.store(loc.idx, expectedType) + bc dropMany stackDiff + bc.load(loc.idx, expectedType) + end if bc goTo label case LoadDestination.Return => bc emitRETURN returnType @@ -576,7 +599,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { if dest == LoadDestination.FallThrough then val resKind = tpeTK(tree) val jumpTarget = new 
asm.Label - registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget)) + registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget, stackHeight)) genLoad(expr, resKind) markProgramPoint(jumpTarget) resKind @@ -634,7 +657,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { markProgramPoint(loop) if isInfinite then - val dest = LoadDestination.Jump(loop) + val dest = LoadDestination.Jump(loop, stackHeight) genLoadTo(body, UNIT, dest) dest else @@ -649,7 +672,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val failure = new asm.Label genCond(cond, success, failure, targetIfNoJump = success) markProgramPoint(success) - genLoadTo(body, UNIT, LoadDestination.Jump(loop)) + genLoadTo(body, UNIT, LoadDestination.Jump(loop, stackHeight)) markProgramPoint(failure) end match LoadDestination.FallThrough @@ -700,7 +723,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { var elemKind = arr.elementType val argsSize = args.length if (argsSize > dims) { - report.error(s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span)) + report.error(em"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span)) } if (argsSize < dims) { /* In one step: @@ -743,7 +766,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not just ALOAD_0) genLoad(superQual) + stackHeight += 1 genLoadArguments(args, paramTKs(app)) + stackHeight -= 1 generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.span) // 'new' constructor call: Note: since constructors are @@ -765,7 +790,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.showFullName} is different from $rt") mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName) bc dup generatedType + stackHeight += 2 
genLoadArguments(args, paramTKs(app)) + stackHeight -= 2 genCallMethod(ctor, InvokeStyle.Special, app.span) case _ => @@ -798,8 +825,12 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { else if (app.hasAttachment(BCodeHelpers.UseInvokeSpecial)) InvokeStyle.Special else InvokeStyle.Virtual - if (invokeStyle.hasInstance) genLoadQualifier(fun) + val savedStackHeight = stackHeight + if invokeStyle.hasInstance then + genLoadQualifier(fun) + stackHeight += 1 genLoadArguments(args, paramTKs(app)) + stackHeight = savedStackHeight val DesugaredSelect(qual, name) = fun: @unchecked // fun is a Select, also checked in genLoadQualifier val isArrayClone = name == nme.clone_ && qual.tpe.widen.isInstanceOf[JavaArrayType] @@ -857,6 +888,8 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { bc iconst elems.length bc newarray elmKind + stackHeight += 3 // during the genLoad below, there is the result, its dup, and the index + var i = 0 var rest = elems while (!rest.isEmpty) { @@ -868,6 +901,8 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { i = i + 1 } + stackHeight -= 3 + generatedType } @@ -882,7 +917,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val (generatedType, postMatch, postMatchDest) = if dest == LoadDestination.FallThrough then val postMatch = new asm.Label - (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch)) + (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch, stackHeight)) else (expectedType, null, dest) @@ -1159,14 +1194,21 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } def genLoadArguments(args: List[Tree], btpes: List[BType]): Unit = - args match - case arg :: args1 => - btpes match - case btpe :: btpes1 => - genLoad(arg, btpe) - genLoadArguments(args1, btpes1) - case _ => - case _ => + @tailrec def loop(args: List[Tree], btpes: List[BType]): Unit = + args match + case arg :: args1 => + btpes match + case btpe :: btpes1 => + genLoad(arg, btpe) + stackHeight += btpe.size + loop(args1, btpes1) + case _ => + case _ => + + val 
savedStackHeight = stackHeight + loop(args, btpes) + stackHeight = savedStackHeight + end genLoadArguments def genLoadModule(tree: Tree): BType = { val module = ( @@ -1255,7 +1297,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { .toList // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower - if (classfileVersion < asm.Opcodes.V9) { + if (backendUtils.classfileVersion < asm.Opcodes.V9) { // Estimate capacity needed for the string builder val approxBuilderSize = concatArguments.view.map { @@ -1265,11 +1307,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { }.sum bc.genNewStringBuilder(approxBuilderSize) + stackHeight += 1 // during the genLoad below, there is a reference to the StringBuilder on the stack for (elem <- concatArguments) { val elemType = tpeTK(elem) genLoad(elem, elemType) bc.genStringBuilderAppend(elemType) } + stackHeight -= 1 + bc.genStringBuilderEnd } else { @@ -1286,12 +1331,15 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { var totalArgSlots = 0 var countConcats = 1 // ie. 
1 + how many times we spilled + val savedStackHeight = stackHeight + for (elem <- concatArguments) { val tpe = tpeTK(elem) val elemSlots = tpe.size // Unlikely spill case if (totalArgSlots + elemSlots >= MaxIndySlots) { + stackHeight = savedStackHeight + countConcats bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) countConcats += 1 totalArgSlots = 0 @@ -1316,8 +1364,10 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val tpe = tpeTK(elem) argTypes += tpe.toASMType genLoad(elem, tpe) + stackHeight += 1 } } + stackHeight = savedStackHeight bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) // If we spilled, generate one final concat @@ -1512,7 +1562,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } else { val tk = tpeTK(l).maxType(tpeTK(r)) genLoad(l, tk) + stackHeight += tk.size genLoad(r, tk) + stackHeight -= tk.size genCJUMP(success, failure, op, tk, targetIfNoJump) } } @@ -1627,7 +1679,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } genLoad(l, ObjectRef) + stackHeight += 1 genLoad(r, ObjectRef) + stackHeight -= 1 genCallMethod(equalsMethod, InvokeStyle.Static) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) } @@ -1643,7 +1697,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } else if (isNonNullExpr(l)) { // SI-7852 Avoid null check if L is statically non-null. 
genLoad(l, ObjectRef) + stackHeight += 1 genLoad(r, ObjectRef) + stackHeight -= 1 genCallMethod(defn.Any_equals, InvokeStyle.Virtual) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) } else { @@ -1653,7 +1709,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val lNonNull = new asm.Label genLoad(l, ObjectRef) + stackHeight += 1 genLoad(r, ObjectRef) + stackHeight -= 1 locals.store(eqEqTempLocal) bc dup ObjectRef genCZJUMP(lNull, lNonNull, Primitives.EQ, ObjectRef, targetIfNoJump = lNull) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index b6d898b3b221..c36c8c546635 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -42,18 +42,19 @@ import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions * @version 1.0 * */ -trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { +trait BCodeHelpers extends BCodeIdiomatic { // for some reason singleton types aren't allowed in constructor calls. will need several casts in code to enforce - //import global._ - //import bTypes._ - //import coreBTypes._ import bTypes._ import tpd._ import coreBTypes._ import int.{_, given} import DottyBackendInterface._ + // We need to access GenBCode phase to get access to post-processor components. + // At this point it should always be initialized already. 
+ protected lazy val backendUtils = genBCodePhase.asInstanceOf[GenBCode].postProcessor.backendUtils + def ScalaATTRName: String = "Scala" def ScalaSignatureATTRName: String = "ScalaSig" @@ -61,100 +62,15 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { @threadUnsafe lazy val AnnotationRetentionSourceAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("SOURCE") @threadUnsafe lazy val AnnotationRetentionClassAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("CLASS") @threadUnsafe lazy val AnnotationRetentionRuntimeAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("RUNTIME") - @threadUnsafe lazy val JavaAnnotationClass: ClassSymbol = requiredClass("java.lang.annotation.Annotation") val bCodeAsmCommon: BCodeAsmCommon[int.type] = new BCodeAsmCommon(int) - /* - * must-single-thread - */ - def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - getFile(base, clsName, suffix) - } - - /* - * must-single-thread - */ - def getOutFolder(csym: Symbol, cName: String): AbstractFile = { - try { - outputDirectory - } catch { - case ex: Throwable => - report.error(s"Couldn't create file for class $cName\n${ex.getMessage}", ctx.source.atSpan(csym.span)) - null - } - } - final def traitSuperAccessorName(sym: Symbol): String = { val nameString = sym.javaSimpleName.toString if (sym.name == nme.TRAIT_CONSTRUCTOR) nameString else nameString + "$" } - // ----------------------------------------------------------------------------------------- - // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) - // Background: - // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf - // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - // https://issues.scala-lang.org/browse/SI-3872 - // 
----------------------------------------------------------------------------------------- - - /* An `asm.ClassWriter` that uses `jvmWiseLUB()` - * The internal name of the least common ancestor of the types given by inameA and inameB. - * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow - */ - final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) { - - /** - * This method is thread-safe: it depends only on the BTypes component, which does not depend - * on global. TODO @lry move to a different place where no global is in scope, on bTypes. - */ - override def getCommonSuperClass(inameA: String, inameB: String): String = { - val a = classBTypeFromInternalName(inameA) - val b = classBTypeFromInternalName(inameB) - val lub = a.jvmWiseLUB(b) - val lubName = lub.internalName - assert(lubName != "scala/Any") - lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. - } - } - - /* - * must-single-thread - */ - def initBytecodeWriter(): BytecodeWriter = { - (None: Option[AbstractFile] /*getSingleOutput*/) match { // todo: implement - case Some(f) if f.hasExtension("jar") => - new DirectToJarfileWriter(f.file) - case _ => - factoryNonJarBytecodeWriter() - } - } - - /* - * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner - * classes in BTypes.scala. - * - * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of - * each inner class it lists (those are looked up and included). - * - * This method serializes in the InnerClasses JVM attribute in an appropriate order, - * not necessarily that given by `refedInnerClasses`. 
- * - * can-multi-thread - */ - final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { - // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler - val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) - allNestedClasses ++= declaredInnerClasses - refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) - for nestedClass <- allNestedClasses - do { - // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. - val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked - jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) - } - } /* * can-multi-thread @@ -415,7 +331,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { arrAnnotV.visitEnd() } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape. 
*/ - case t @ Apply(constr, args) if t.tpe.derivesFrom(JavaAnnotationClass) => + case t @ Apply(constr, args) if t.tpe.classSymbol.is(JavaAnnotation) => val typ = t.tpe.classSymbol.denot.info val assocs = assocsFromApply(t) val desc = innerClasesStore.typeDescriptor(typ) // the class descriptor of the nested annotation class @@ -423,7 +339,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { emitAssocs(nestedVisitor, assocs, bcodeStore)(innerClasesStore) case t => - report.error(ex"Annotation argument is not a constant", t.sourcePos) + report.error(em"Annotation argument is not a constant", t.sourcePos) } } @@ -681,7 +597,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { val mirrorClass = new asm.tree.ClassNode mirrorClass.visit( - classfileVersion, + backendUtils.classfileVersion, bType.info.flags, mirrorName, null /* no java-generic-signature */, @@ -872,10 +788,11 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { try body catch { case ex: Throwable => - report.error(i"""|compiler bug: created invalid generic signature for $sym in ${sym.denot.owner.showFullName} - |signature: $sig - |if this is reproducible, please report bug at https://github.com/lampepfl/dotty/issues - """.trim, sym.sourcePos) + report.error( + em"""|compiler bug: created invalid generic signature for $sym in ${sym.denot.owner.showFullName} + |signature: $sig + |if this is reproducible, please report bug at https://github.com/lampepfl/dotty/issues + """, sym.sourcePos) throw ex } } diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala index 02268c2919ba..42f8ef7f4ef6 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala @@ -19,51 +19,13 @@ import dotty.tools.dotc.report */ trait BCodeIdiomatic { val int: DottyBackendInterface - final lazy val bTypes = new 
BTypesFromSymbols[int.type](int) + val bTypes: BTypesFromSymbols[int.type] import int.{_, given} import bTypes._ import coreBTypes._ - - lazy val target = - val releaseValue = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - val targetValue = Option(ctx.settings.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) - val defaultTarget = "8" - (releaseValue, targetValue) match - case (Some(release), None) => release - case (None, Some(target)) => target - case (Some(release), Some(_)) => - report.warning(s"The value of ${ctx.settings.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") - release - case (None, None) => "8" // least supported version by default - - - // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings - lazy val classfileVersion: Int = target match { - case "8" => asm.Opcodes.V1_8 - case "9" => asm.Opcodes.V9 - case "10" => asm.Opcodes.V10 - case "11" => asm.Opcodes.V11 - case "12" => asm.Opcodes.V12 - case "13" => asm.Opcodes.V13 - case "14" => asm.Opcodes.V14 - case "15" => asm.Opcodes.V15 - case "16" => asm.Opcodes.V16 - case "17" => asm.Opcodes.V17 - case "18" => asm.Opcodes.V18 - case "19" => asm.Opcodes.V19 - } - - lazy val majorVersion: Int = (classfileVersion & 0xFF) - lazy val emitStackMapFrame = (majorVersion >= 50) - - val extraProc: Int = - import GenBCodeOps.addFlagIf - asm.ClassWriter.COMPUTE_MAXS - .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) - lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName val CLASS_CONSTRUCTOR_NAME = "" @@ -619,6 +581,16 @@ trait BCodeIdiomatic { // can-multi-thread final def drop(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) } + // can-multi-thread + final def dropMany(size: Int): Unit = { + var s = size + while s >= 2 do + emit(Opcodes.POP2) + s -= 2 + if s > 0 then + emit(Opcodes.POP) + } + // can-multi-thread final def dup(tk: BType): Unit = { emit(if 
(tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) } diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index a524d5fb5a8b..0a11fb898b48 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -45,7 +45,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { /** The value is put on the stack, and control flows through to the next opcode. */ case FallThrough /** The value is put on the stack, and control flow is transferred to the given `label`. */ - case Jump(label: asm.Label) + case Jump(label: asm.Label, targetStackHeight: Int) /** The value is RETURN'ed from the enclosing method. */ case Return /** The value is ATHROW'n. */ @@ -151,7 +151,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { // !!! Part of this logic is duplicated in JSCodeGen.genCompilationUnit claszSymbol.info.decls.foreach { f => - if f.isField && !f.name.is(LazyBitMapName) then + if f.isField && !f.name.is(LazyBitMapName) && !f.name.is(LazyLocalName) then f.setFlag(JavaStatic) } @@ -271,7 +271,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { val flags = javaFlags(claszSymbol) val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner) - cnode.visit(classfileVersion, flags, + cnode.visit(backendUtils.classfileVersion, flags, thisName, thisSignature, superClass, interfaceNames.toArray) @@ -368,6 +368,8 @@ trait BCodeSkelBuilder extends BCodeHelpers { // used by genLoadTry() and genSynchronized() var earlyReturnVar: Symbol = null var shouldEmitCleanup = false + // stack tracking + var stackHeight = 0 // line numbers var lastEmittedLineNr = -1 @@ -504,6 +506,13 @@ trait BCodeSkelBuilder extends BCodeHelpers { loc } + def makeTempLocal(tk: BType): Local = + assert(nxtIdx != -1, "not a valid start index") + assert(tk.size > 0, "makeLocal called for a symbol whose type is Unit.") + val loc = Local(tk, "temp", nxtIdx, isSynth = 
true) + nxtIdx += tk.size + loc + // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol. def store(locSym: Symbol): Unit = { val Local(tk, _, idx, _) = slots(locSym) @@ -547,11 +556,17 @@ trait BCodeSkelBuilder extends BCodeHelpers { case _ => false } ) } def lineNumber(tree: Tree): Unit = { + @tailrec + def getNonLabelNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match { + case a: asm.tree.LabelNode => getNonLabelNode(a.getPrevious) + case _ => a + } + if (!emitLines || !tree.span.exists) return; val nr = ctx.source.offsetToLine(tree.span.point) + 1 if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr - lastInsn match { + getNonLabelNode(lastInsn) match { case lnn: asm.tree.LineNumberNode => // overwrite previous landmark as no instructions have been emitted for it lnn.line = nr @@ -574,6 +589,8 @@ trait BCodeSkelBuilder extends BCodeHelpers { earlyReturnVar = null shouldEmitCleanup = false + stackHeight = 0 + lastEmittedLineNr = -1 } @@ -748,7 +765,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { if (params.size > MaximumJvmParameters) { // SI-7324 - report.error(s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) + report.error(em"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) return } @@ -800,9 +817,10 @@ trait BCodeSkelBuilder extends BCodeHelpers { val veryFirstProgramPoint = currProgramPoint() if trimmedRhs == tpd.EmptyTree then - report.error("Concrete method has no definition: " + dd + ( - if (ctx.settings.Ydebug.value) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" - else ""), + report.error( + em"Concrete method has no definition: $dd${ + if (ctx.settings.Ydebug.value) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" + else ""}", ctx.source.atSpan(NoSpan) ) else diff --git 
a/compiler/src/dotty/tools/backend/jvm/BTypes.scala b/compiler/src/dotty/tools/backend/jvm/BTypes.scala index 57bd343b6658..5539bf44aa17 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypes.scala @@ -14,7 +14,9 @@ import scala.tools.asm * This representation is immutable and independent of the compiler data structures, hence it can * be queried by concurrent threads. */ -abstract class BTypes { +abstract class BTypes { self => + val frontendAccess: PostProcessorFrontendAccess + import frontendAccess.{frontendSynch} val int: DottyBackendInterface import int.given @@ -37,10 +39,7 @@ abstract class BTypes { */ def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName) - // Some core BTypes are required here, in class BType, where no Global instance is available. - // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual - // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol. - val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type] + val coreBTypes: CoreBTypes { val bTypes: self.type} import coreBTypes._ /** @@ -862,3 +861,12 @@ abstract class BTypes { */ /*final*/ case class MethodNameAndType(name: String, methodType: MethodBType) } + +object BTypes { + /** + * A marker for strings that represent class internal names. + * Ideally the type would be incompatible with String, for example by making it a value class. + * But that would create overhead in a Collection[InternalName]. 
+ */ + type InternalName = String +} diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index 54dafe6f0032..884dd19ee64f 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -14,20 +14,14 @@ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.Phases /** * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary * information from a symbol and its type to create the corresponding ClassBType. It requires * access to the compiler (global parameter). - * - * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes - * uses classBTypeFromSymbol, hence requires access to the compiler (global). - * - * BTypesFromSymbols extends BTypes because the implementation of BTypes requires access to some - * of the core btypes. They are declared in BTypes as abstract members. Note that BTypes does - * not have access to the compiler instance. */ -class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { +class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAccess: PostProcessorFrontendAccess) extends BTypes { import int.{_, given} import DottyBackendInterface.{symExtensions, _} @@ -37,39 +31,18 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) import bCodeAsmCommon._ - // Why the proxy, see documentation of class [[CoreBTypes]]. 
- val coreBTypes: CoreBTypesProxy[this.type] = new CoreBTypesProxy[this.type](this) - import coreBTypes._ - - final def intializeCoreBTypes(): Unit = { - coreBTypes.setBTypes(new CoreBTypes[this.type](this)) - } - - private[this] val perRunCaches: Caches = new Caches { - def newAnyRefMap[K <: AnyRef, V](): mutable.AnyRefMap[K, V] = new mutable.AnyRefMap[K, V]() - def newWeakMap[K, V](): mutable.WeakHashMap[K, V] = new mutable.WeakHashMap[K, V]() - def recordCache[T <: Clearable](cache: T): T = cache - def newMap[K, V](): mutable.HashMap[K, V] = new mutable.HashMap[K, V]() - def newSet[K](): mutable.Set[K] = new mutable.HashSet[K] - } - - // TODO remove abstraction - private abstract class Caches { - def recordCache[T <: Clearable](cache: T): T - def newWeakMap[K, V](): collection.mutable.WeakHashMap[K, V] - def newMap[K, V](): collection.mutable.HashMap[K, V] - def newSet[K](): collection.mutable.Set[K] - def newAnyRefMap[K <: AnyRef, V](): collection.mutable.AnyRefMap[K, V] + val coreBTypes = new CoreBTypesFromSymbols[I]{ + val bTypes: BTypesFromSymbols.this.type = BTypesFromSymbols.this } + import coreBTypes._ - @threadUnsafe protected lazy val classBTypeFromInternalNameMap = { - perRunCaches.recordCache(collection.concurrent.TrieMap.empty[String, ClassBType]) - } + @threadUnsafe protected lazy val classBTypeFromInternalNameMap = + collection.concurrent.TrieMap.empty[String, ClassBType] /** * Cache for the method classBTypeFromSymbol. */ - @threadUnsafe private lazy val convertedClasses = perRunCaches.newMap[Symbol, ClassBType]() + @threadUnsafe private lazy val convertedClasses = collection.mutable.HashMap.empty[Symbol, ClassBType] /** * The ClassBType for a class symbol `sym`. 
diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala new file mode 100644 index 000000000000..d54364b1675f --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -0,0 +1,181 @@ +package dotty.tools.backend.jvm + +import scala.tools.asm +import scala.tools.asm.Handle +import scala.tools.asm.tree.InvokeDynamicInsnNode +import asm.tree.ClassNode +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import dotty.tools.dotc.report + +import scala.language.unsafeNulls + +/** + * This component hosts tools and utilities used in the backend that require access to a `BTypes` + * instance. + */ +class BackendUtils(val postProcessor: PostProcessor) { + import postProcessor.{bTypes, frontendAccess} + import frontendAccess.{compilerSettings} + import bTypes.* + import coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle + + // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings + lazy val classfileVersion: Int = compilerSettings.target match { + case "8" => asm.Opcodes.V1_8 + case "9" => asm.Opcodes.V9 + case "10" => asm.Opcodes.V10 + case "11" => asm.Opcodes.V11 + case "12" => asm.Opcodes.V12 + case "13" => asm.Opcodes.V13 + case "14" => asm.Opcodes.V14 + case "15" => asm.Opcodes.V15 + case "16" => asm.Opcodes.V16 + case "17" => asm.Opcodes.V17 + case "18" => asm.Opcodes.V18 + case "19" => asm.Opcodes.V19 + case "20" => asm.Opcodes.V20 + } + + lazy val extraProc: Int = { + import GenBCodeOps.addFlagIf + val majorVersion: Int = (classfileVersion & 0xFF) + val emitStackMapFrame = (majorVersion >= 50) + asm.ClassWriter.COMPUTE_MAXS + .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) + } + + def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { + val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] + for (m <- classNode.methods.asScala) { + val iter = m.instructions.iterator + while (iter.hasNext) { 
+ val insn = iter.next() + insn match { + case indy: InvokeDynamicInsnNode + if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => + import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE + val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt + val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 + if isSerializable then + val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] + indyLambdaBodyMethods += implMethod + case _ => + } + } + } + indyLambdaBodyMethods.toArray + } + + /* + * Add: + * + * private static Object $deserializeLambda$(SerializedLambda l) { + * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) + * catch { + * case i: IllegalArgumentException => + * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) + * catch { + * case i: IllegalArgumentException => + * ... + * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) + * } + * + * We use invokedynamic here to enable caching within the deserializer without needing to + * host a static field in the enclosing class. This allows us to add this method to interfaces + * that define lambdas in default methods. + * + * SI-10232 we can't pass arbitrary number of method handles to the final varargs parameter of the bootstrap + * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target + * methods. + */ + def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { + import asm.Opcodes._ + import bTypes._ + import coreBTypes._ + + val cw = classNode + + // Make sure to reference the ClassBTypes of all types that are used in the code generated + // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to + // `classBTypeFromInternalNameMap`. When writing the classfile, the asm ClassWriter computes + // stack map frames and invokes the `getCommonSuperClass` method. 
This method expects all + // ClassBTypes mentioned in the source code to exist in the map. + + val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor + + val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) + def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { + mv.visitVarInsn(ALOAD, 0) + mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) + } + + val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. See See MAX_MH_ARITY in CallSite.java + val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray + val numGroups = groups.length + + import scala.tools.asm.Label + val initialLabels = Array.fill(numGroups - 1)(new Label()) + val terminalLabel = new Label + def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) + + for ((label, i) <- initialLabels.iterator.zipWithIndex) { + mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) + } + for ((label, i) <- initialLabels.iterator.zipWithIndex) { + mv.visitLabel(label) + emitLambdaDeserializeIndy(groups(i).toIndexedSeq) + mv.visitInsn(ARETURN) + } + mv.visitLabel(terminalLabel) + emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) + mv.visitInsn(ARETURN) + } + + /** + * Visit the class node and collect all referenced nested classes. 
+ */ + def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { + // type InternalName = String + val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { + def declaredNestedClasses(internalName: InternalName): List[ClassBType] = + bTypes.classBTypeFromInternalName(internalName).info.memberClasses + + def getClassIfNested(internalName: InternalName): Option[ClassBType] = { + val c = bTypes.classBTypeFromInternalName(internalName) + Option.when(c.isNestedClass)(c) + } + + def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { + // don't crash on invalid generic signatures + } + } + c.visit(classNode) + (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) + } + + /* + * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner + * classes in BTypes.scala. + * + * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of + * each inner class it lists (those are looked up and included). + * + * This method serializes in the InnerClasses JVM attribute in an appropriate order, + * not necessarily that given by `refedInnerClasses`. + * + * can-multi-thread + */ + final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { + // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler + val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) + allNestedClasses ++= declaredInnerClasses + refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) + for nestedClass <- allNestedClasses + do { + // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. 
+ val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked + jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) + } + } +} diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala new file mode 100644 index 000000000000..08e84de92dca --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala @@ -0,0 +1,142 @@ +package dotty.tools.backend.jvm + +import java.io.{DataOutputStream, IOException, PrintWriter, StringWriter} +import java.nio.file.Files +import java.util.jar.Attributes.Name + +import scala.tools.asm.ClassReader +import scala.tools.asm.tree.ClassNode +import dotty.tools.io.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.util.NoSourcePosition +import java.nio.charset.StandardCharsets +import java.nio.channels.ClosedByInterruptException +import BTypes.InternalName +import scala.language.unsafeNulls + +class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { + import frontendAccess.{backendReporting, compilerSettings} + + // if non-null, classfiles are additionally written to this directory + private val dumpOutputDir: AbstractFile = getDirectoryOrNull(compilerSettings.dumpClassesDirectory) + + // if non-null, classfiles are written to a jar instead of the output directory + private val jarWriter: JarWriter | Null = compilerSettings.outputDirectory match { + case jar: JarArchive => + val mainClass = compilerSettings.mainClass.orElse { + // If no main class was specified, see if there's only one + // entry point among the classes going into the jar. 
+ frontendAccess.getEntryPoints match { + case name :: Nil => + backendReporting.log(i"Unique entry point: setting Main-Class to $name") + Some(name) + case names => + if names.isEmpty then backendReporting.warning(em"No Main-Class designated or discovered.") + else backendReporting.warning(em"No Main-Class due to multiple entry points:\n ${names.mkString("\n ")}") + None + } + } + jar.underlyingSource.map{ source => + if jar.isEmpty then + val jarMainAttrs = mainClass.map(Name.MAIN_CLASS -> _).toList + new Jar(source.file).jarWriter(jarMainAttrs: _*) + else + // Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where + // created using `AbstractFile.bufferedOutputStream`instead of JarWritter + backendReporting.warning(em"Tried to write to non-empty JAR: $source") + null + }.orNull + + case _ => null + } + + private def getDirectoryOrNull(dir: Option[String]): AbstractFile = + dir.map(d => new PlainDirectory(Directory(d))).orNull + + private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { + if (base.file != null) { + fastGetFile(base, clsName, suffix) + } else { + def ensureDirectory(dir: AbstractFile): AbstractFile = + if (dir.isDirectory) dir + else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) + var dir = base + val pathParts = clsName.split("[./]").toList + for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part + ensureDirectory(dir) fileNamed pathParts.last + suffix + } + } + + private def fastGetFile(base: AbstractFile, clsName: String, suffix: String) = { + val index = clsName.lastIndexOf('/') + val (packageName, simpleName) = if (index > 0) { + (clsName.substring(0, index), clsName.substring(index + 1)) + } else ("", clsName) + val directory = base.file.toPath.resolve(packageName) + new PlainFile(Path(directory.resolve(simpleName + suffix))) + } + + private def writeBytes(outFile: AbstractFile, bytes: 
Array[Byte]): Unit = { + if (outFile.file != null) { + val outPath = outFile.file.toPath + try Files.write(outPath, bytes) + catch { + case _: java.nio.file.NoSuchFileException => + Files.createDirectories(outPath.getParent) + Files.write(outPath, bytes) + } + } else { + val out = new DataOutputStream(outFile.bufferedOutput) + try out.write(bytes, 0, bytes.length) + finally out.close() + } + } + + def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile | Null = try { + // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) + val outFile = writeToJarOrFile(className, bytes, ".class") + // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) + + if (dumpOutputDir != null) { + val dumpFile = getFile(dumpOutputDir, className, ".class") + writeBytes(dumpFile, bytes) + } + outFile + } catch { + case e: FileConflictException => + backendReporting.error(em"error writing $className: ${e.getMessage}") + null + case e: java.nio.file.FileSystemException => + if compilerSettings.debug then e.printStackTrace() + backendReporting.error(em"error writing $className: ${e.getClass.getName} ${e.getMessage}") + null + } + + def writeTasty(className: InternalName, bytes: Array[Byte]): Unit = + writeToJarOrFile(className, bytes, ".tasty") + + private def writeToJarOrFile(className: InternalName, bytes: Array[Byte], suffix: String): AbstractFile | Null = { + if jarWriter == null then + val outFolder = compilerSettings.outputDirectory + val outFile = getFile(outFolder, className, suffix) + try writeBytes(outFile, bytes) + catch case ex: ClosedByInterruptException => + try outFile.delete() // don't leave an empty or half-written files around after an interrupt + catch case _: Throwable => () + finally throw ex + outFile + else + val path = className + suffix + val out = jarWriter.newOutputStream(path) + try out.write(bytes, 0, bytes.length) + finally out.flush() + null + } + + def close(): Unit = { + if (jarWriter != 
null) jarWriter.close() + } +} + +/** Can't output a file due to the state of the file system. */ +class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala new file mode 100644 index 000000000000..c9f9e4e23d90 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -0,0 +1,181 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Phases.Phase + +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.interfaces +import dotty.tools.dotc.report + +import java.util.Optional +import dotty.tools.dotc.sbt.ExtractDependencies +import dotty.tools.dotc.core._ +import Contexts._ +import Phases._ +import Symbols._ +import StdNames.nme + +import java.io.DataOutputStream +import java.nio.channels.ClosedByInterruptException + +import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } + +import scala.tools.asm +import scala.tools.asm.tree._ +import tpd._ +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.util.NoSourcePosition + + +class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val bTypes: BTypesFromSymbols[int.type]) { self => + import DottyBackendInterface.symExtensions + import bTypes._ + import int.given + + private lazy val mirrorCodeGen = Impl.JMirrorBuilder() + + def genUnit(unit: CompilationUnit): GeneratedDefs = { + val generatedClasses = mutable.ListBuffer.empty[GeneratedClass] + val generatedTasty = mutable.ListBuffer.empty[GeneratedTasty] + + def genClassDef(cd: TypeDef): Unit = + try + val sym = cd.symbol + val sourceFile = unit.source.file + + def registerGeneratedClass(classNode: ClassNode, 
isArtifact: Boolean): Unit = + generatedClasses += GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated(classNode, sym, unit.source)) + + val plainC = genClass(cd, unit) + registerGeneratedClass(plainC, isArtifact = false) + + val attrNode = + if !sym.isTopLevelModuleClass then plainC + else if sym.companionClass == NoSymbol then + val mirrorC = genMirrorClass(sym, unit) + registerGeneratedClass(mirrorC, isArtifact = true) + mirrorC + else + report.log(s"No mirror class for module with linked class: ${sym.fullName}", NoSourcePosition) + plainC + + if sym.isClass then + genTastyAndSetAttributes(sym, attrNode) + catch + case ex: Throwable => + ex.printStackTrace() + report.error(s"Error while emitting ${unit.source}\n${ex.getMessage}", NoSourcePosition) + + + def genTastyAndSetAttributes(claszSymbol: Symbol, store: ClassNode): Unit = + import Impl.createJAttribute + for (binary <- unit.pickled.get(claszSymbol.asClass)) { + generatedTasty += GeneratedTasty(store, binary) + val tasty = + val uuid = new TastyHeaderUnpickler(binary()).readHeader() + val lo = uuid.getMostSignificantBits + val hi = uuid.getLeastSignificantBits + + // TASTY attribute is created but only the UUID bytes are stored in it. + // A TASTY attribute has length 16 if and only if the .tasty file exists. 
+ val buffer = new TastyBuffer(16) + buffer.writeUncompressedLong(lo) + buffer.writeUncompressedLong(hi) + buffer.bytes + + val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) + store.visitAttribute(dataAttr) + } + + def genClassDefs(tree: Tree): Unit = + tree match { + case EmptyTree => () + case PackageDef(_, stats) => stats foreach genClassDefs + case ValDef(_, _, _) => () // module val not emitted + case td: TypeDef => genClassDef(td) + } + + genClassDefs(unit.tpdTree) + GeneratedDefs(generatedClasses.toList, generatedTasty.toList) + } + + // Creates a callback that will be evaluated in PostProcessor after creating a file + private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: interfaces.SourceFile): AbstractFile => Unit = clsFile => { + val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { + (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) + } + + val className = cls.name.replace('/', '.') + if (ctx.compilerCallback != null) + ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) + + if (ctx.sbtCallback != null) { + val jSourceFile = sourceFile.jfile.orElse(null) + val cb = ctx.sbtCallback + if (isLocal) cb.generatedLocalClass(jSourceFile, clsFile.file) + else cb.generatedNonLocalClass(jSourceFile, clsFile.file, className, fullClassName) + } + } + + /** Convert a `dotty.tools.io.AbstractFile` into a + * `dotty.tools.dotc.interfaces.AbstractFile`. 
+ */ + private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = + new interfaces.AbstractFile { + override def name = absfile.name + override def path = absfile.path + override def jfile = Optional.ofNullable(absfile.file) + } + + private def genClass(cd: TypeDef, unit: CompilationUnit): ClassNode = { + val b = new Impl.PlainClassBuilder(unit) + b.genPlainClass(cd) + val cls = b.cnode + checkForCaseConflict(cls.name, cd.symbol) + cls + } + + private def genMirrorClass(classSym: Symbol, unit: CompilationUnit): ClassNode = { + val cls = mirrorCodeGen.genMirrorClass(classSym, unit) + checkForCaseConflict(cls.name, classSym) + cls + } + + private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] + private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { + val lowerCaseName = javaClassName.toLowerCase + lowerCaseNames.get(lowerCaseName) match { + case None => + lowerCaseNames.put(lowerCaseName, classSymbol) + case Some(dupClassSym) => + // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting + val (cl1, cl2) = + if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) + else (dupClassSym, classSymbol) + val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString + atPhase(typerPhase) { + if same then + // FIXME: This should really be an error, but then FromTasty tests fail + report.warning(s"${cl1.show} and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) + else + report.warning(s"${cl1.show} differs only in case from ${cl2.showLocated}. 
" + + "Such classes will overwrite one another on case-insensitive filesystems.", cl1.sourcePos) + } + } + } + + sealed transparent trait ImplEarlyInit{ + val int: self.int.type = self.int + val bTypes: self.bTypes.type = self.bTypes + protected val primitives: DottyPrimitives = self.primitives + } + object Impl extends ImplEarlyInit with BCodeSyncAndTry { + class PlainClassBuilder(unit: CompilationUnit) extends SyncAndTryBuilder(unit) + } +} diff --git a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala index e94bda16fbb8..30ad6b29b9f0 100644 --- a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala @@ -7,38 +7,58 @@ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.transform.Erasure import scala.tools.asm.{Handle, Opcodes} import dotty.tools.dotc.core.StdNames +import BTypes.InternalName + +abstract class CoreBTypes { + val bTypes: BTypes + import bTypes._ + + def primitiveTypeMap: Map[Symbol, PrimitiveBType] + + def boxedClasses: Set[ClassBType] + + def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] + + def boxResultType: Map[Symbol, ClassBType] + + def unboxResultType: Map[Symbol, PrimitiveBType] + + def srNothingRef : ClassBType + def srNullRef : ClassBType + + def ObjectRef : ClassBType + def StringRef : ClassBType + def jlStringBuilderRef : ClassBType + def jlStringBufferRef : ClassBType + def jlCharSequenceRef : ClassBType + def jlClassRef : ClassBType + def jlThrowableRef : ClassBType + def jlCloneableRef : ClassBType + def jiSerializableRef : ClassBType + def jlClassCastExceptionRef : ClassBType + def jlIllegalArgExceptionRef : ClassBType + def jliSerializedLambdaRef : ClassBType + + def srBoxesRuntimeRef: ClassBType + + def jliLambdaMetaFactoryMetafactoryHandle : Handle + def jliLambdaMetaFactoryAltMetafactoryHandle : Handle + def jliLambdaDeserializeBootstrapHandle : Handle + def 
jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle + + def asmBoxTo : Map[BType, MethodNameAndType] + def asmUnboxTo: Map[BType, MethodNameAndType] + + def typeOfArrayOp: Map[Int, BType] +} + +abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTypes { + val bTypes: BTypesFromSymbols[I] -/** - * Core BTypes and some other definitions. The initialization of these definitions requies access - * to symbols / types (global). - * - * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To - * make sure the definitions are consistent with the symbols in the current run, the - * `intializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each - * compiler run. - * - * The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The - * reason is that having a `var bTypes: CoreBTypes` would not allow `import bTypes._`. Instead, the - * proxy class holds a `CoreBTypes` in a variable field and forwards to this instance. - * - * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When - * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the - * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned - * in the proxy before the fields are initialized. - * - * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap - * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are - * added when the ClassBTypes are created. The per run cache removes them, so they would be missing - * in the second run. 
- */ -class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) { import bTypes._ import int.given import DottyBackendInterface._ - - //import global._ - //import rootMirror.{requiredClass, getClassIfDefined} - //import definitions._ + import dotty.tools.dotc.core.Contexts.Context /** * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above @@ -56,31 +76,21 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp defn.DoubleClass -> DOUBLE ) - private lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void]) - private lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Boolean]) - private lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Byte]) - private lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Short]) - private lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Character]) - private lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Integer]) - private lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Long]) - private lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Float]) - private lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Double]) - /** * Map from primitive types to their boxed class type. Useful when pushing class literals onto the * operand stack (ldc instruction taking a class literal), see genConstant. 
*/ lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map( - UNIT -> BOXED_UNIT, - BOOL -> BOXED_BOOLEAN, - BYTE -> BOXED_BYTE, - SHORT -> BOXED_SHORT, - CHAR -> BOXED_CHAR, - INT -> BOXED_INT, - LONG -> BOXED_LONG, - FLOAT -> BOXED_FLOAT, - DOUBLE -> BOXED_DOUBLE - ) + UNIT -> classBTypeFromSymbol(requiredClass[java.lang.Void]), + BOOL -> classBTypeFromSymbol(requiredClass[java.lang.Boolean]), + BYTE -> classBTypeFromSymbol(requiredClass[java.lang.Byte]), + SHORT -> classBTypeFromSymbol(requiredClass[java.lang.Short]), + CHAR -> classBTypeFromSymbol(requiredClass[java.lang.Character]), + INT -> classBTypeFromSymbol(requiredClass[java.lang.Integer]), + LONG -> classBTypeFromSymbol(requiredClass[java.lang.Long]), + FLOAT -> classBTypeFromSymbol(requiredClass[java.lang.Float]), + DOUBLE -> classBTypeFromSymbol(requiredClass[java.lang.Double]) + ) lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet @@ -114,33 +124,35 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp * names of NothingClass and NullClass can't be emitted as-is. * TODO @lry Once there's a 2.11.3 starr, use the commented argument list. 
The current starr crashes on the type literal `scala.runtime.Nothing$` */ - lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) // (requiredClass[scala.runtime.Nothing$]) - lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) // (requiredClass[scala.runtime.Null$]) - - lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) - lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) - lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) - lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) - lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) - lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) - lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) - lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) // java/lang/Cloneable - lazy val jioSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) // java/io/Serializable - lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) // java/lang/ClassCastException - lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) - lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) - - lazy val srBoxesRunTimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) - - private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) - private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) - private lazy val 
jliMethodHandleRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodHandle]) - private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodHandles.Lookup]) - private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) - private lazy val jliStringConcatFactoryRef : ClassBType = classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 - private lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) - - lazy val jliLambdaMetaFactoryMetafactoryHandle: Handle = new Handle( + lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) + lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) + + lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) + lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) + + lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) + lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) + lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) + lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) + lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) + lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) + lazy val jiSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) + lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) + lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) + lazy val 
jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) + + lazy val srBoxesRuntimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) + + private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) + private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) + private lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(defn.MethodHandleClass) + private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(defn.MethodHandlesLookupClass) + private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) + private lazy val jliStringConcatFactoryRef : ClassBType = classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 + + lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) + + lazy val jliLambdaMetaFactoryMetafactoryHandle = new Handle( Opcodes.H_INVOKESTATIC, jliLambdaMetafactoryRef.internalName, "metafactory", @@ -150,7 +162,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp ).descriptor, /* itf = */ false) - lazy val jliLambdaMetaFactoryAltMetafactoryHandle: Handle = new Handle( + lazy val jliLambdaMetaFactoryAltMetafactoryHandle = new Handle( Opcodes.H_INVOKESTATIC, jliLambdaMetafactoryRef.internalName, "altMetafactory", @@ -159,7 +171,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp jliCallSiteRef ).descriptor, /* itf = */ false) - + lazy val jliLambdaDeserializeBootstrapHandle: Handle = new Handle( Opcodes.H_INVOKESTATIC, srLambdaDeserialize.internalName, @@ -179,19 +191,19 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp jliCallSiteRef ).descriptor, /* itf = */ false) - + /** * Methods 
in scala.runtime.BoxesRuntime */ lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map( - BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)), - BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)), - CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)), - SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)), - INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)), - LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)), - FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)), - DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE)) + BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), boxedClassOfPrimitive(BOOL))), + BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), boxedClassOfPrimitive(BYTE))), + CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), boxedClassOfPrimitive(CHAR))), + SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), boxedClassOfPrimitive(SHORT))), + INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), boxedClassOfPrimitive(INT))), + LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), boxedClassOfPrimitive(LONG))), + FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), boxedClassOfPrimitive(FLOAT))), + DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), boxedClassOfPrimitive(DOUBLE))) ) lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map( @@ -220,75 +232,3 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp ) } } - -/** - * This trait make some core BTypes availalbe that don't depend on a Global instance. Some core - * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. 
- * - * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example - * the type Symbol in - * def primitiveTypeMap: Map[Symbol, PrimitiveBType] - */ -trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { - val bTypes: BTS - import bTypes._ - - def boxedClasses: Set[ClassBType] - - def srNothingRef : ClassBType - def srNullRef : ClassBType - - def ObjectRef : ClassBType - def jlCloneableRef : ClassBType - def jiSerializableRef : ClassBType -} - -/** - * See comment in class [[CoreBTypes]]. - */ -final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) extends CoreBTypesProxyGlobalIndependent[BTFS] { - import bTypes._ - - private var _coreBTypes: CoreBTypes[bTypes.type] = _ - def setBTypes(coreBTypes: CoreBTypes[BTFS]): Unit = { - _coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]] - } - - def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap - - def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses - - def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive - - def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType - - def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType - - def srNothingRef : ClassBType = _coreBTypes.srNothingRef - def srNullRef : ClassBType = _coreBTypes.srNullRef - - def ObjectRef : ClassBType = _coreBTypes.ObjectRef - def StringRef : ClassBType = _coreBTypes.StringRef - def jlStringBuilderRef : ClassBType = _coreBTypes.jlStringBuilderRef - def jlStringBufferRef : ClassBType = _coreBTypes.jlStringBufferRef - def jlCharSequenceRef : ClassBType = _coreBTypes.jlCharSequenceRef - def jlClassRef : ClassBType = _coreBTypes.jlClassRef - def jlThrowableRef : ClassBType = _coreBTypes.jlThrowableRef - def jlCloneableRef : ClassBType = _coreBTypes.jlCloneableRef - def jiSerializableRef : ClassBType = _coreBTypes.jioSerializableRef - def 
jlClassCastExceptionRef : ClassBType = _coreBTypes.jlClassCastExceptionRef - def jlIllegalArgExceptionRef : ClassBType = _coreBTypes.jlIllegalArgExceptionRef - def jliSerializedLambdaRef : ClassBType = _coreBTypes.jliSerializedLambdaRef - - def srBoxesRuntimeRef: ClassBType = _coreBTypes.srBoxesRunTimeRef - - def jliLambdaMetaFactoryMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryMetafactoryHandle - def jliLambdaMetaFactoryAltMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - def jliLambdaDeserializeBootstrapHandle : Handle = _coreBTypes.jliLambdaDeserializeBootstrapHandle - def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle = _coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle - - def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo - def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo - - def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp -} diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index 5461ff81341c..b2278c3f0ce8 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -14,6 +14,7 @@ import Contexts._ import Types._ import Symbols._ import Phases._ +import Decorators.em import dotty.tools.dotc.util.ReadOnlyMap import dotty.tools.dotc.report @@ -21,10 +22,10 @@ import dotty.tools.dotc.report import tpd._ import StdNames.nme -import NameKinds.LazyBitMapName +import NameKinds.{LazyBitMapName, LazyLocalName} import Names.Name -class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { +class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] @@ 
-71,7 +72,7 @@ class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap def _1: Type = field.tpe match { case JavaArrayType(elem) => elem case _ => - report.error(s"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) + report.error(em"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) UnspecifiedErrorType } def _2: List[Tree] = field.elems @@ -128,10 +129,11 @@ object DottyBackendInterface { * the new lazy val encoding: https://github.com/lampepfl/dotty/issues/7140 */ def isStaticModuleField(using Context): Boolean = - sym.owner.isStaticModuleClass && sym.isField && !sym.name.is(LazyBitMapName) + sym.owner.isStaticModuleClass && sym.isField && !sym.name.is(LazyBitMapName) && !sym.name.is(LazyLocalName) def isStaticMember(using Context): Boolean = (sym ne NoSymbol) && - (sym.is(JavaStatic) || sym.isScalaStatic || sym.isStaticModuleField) + (sym.is(JavaStatic) || sym.isScalaStatic || sym.isStaticModuleField) + // guard against no symbol because this code is executed to select which call type (static/dynamic) to use to call array.clone /** diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 73e8fd9edb3b..469a6ea57679 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -1,42 +1,16 @@ package dotty.tools.backend.jvm -import scala.language.unsafeNulls - import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} -import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Phases.Phase - -import scala.collection.mutable -import scala.jdk.CollectionConverters._ -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.interfaces import dotty.tools.dotc.report - -import dotty.tools.dotc.util.SourceFile -import java.util.Optional - import dotty.tools.dotc.core._ -import 
dotty.tools.dotc.sbt.ExtractDependencies +import dotty.tools.dotc.interfaces.CompilerCallback import Contexts._ -import Phases._ import Symbols._ - -import java.io.DataOutputStream -import java.nio.channels.ClosedByInterruptException - -import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } - -import scala.tools.asm -import scala.tools.asm.Handle -import scala.tools.asm.tree._ -import tpd._ -import StdNames._ import dotty.tools.io._ -import scala.tools.asm.MethodTooLargeException -import scala.tools.asm.ClassTooLargeException +import scala.collection.mutable -class GenBCode extends Phase { +class GenBCode extends Phase { self => override def phaseName: String = GenBCode.name @@ -51,618 +25,85 @@ class GenBCode extends Phase { private val entryPoints = new mutable.HashSet[String]() def registerEntryPoint(s: String): Unit = entryPoints += s - private var myOutput: AbstractFile = _ - - private def outputDir(using Context): AbstractFile = { - if (myOutput eq null) - myOutput = ctx.settings.outputDir.value - myOutput + private var _backendInterface: DottyBackendInterface = _ + def backendInterface(using ctx: Context): DottyBackendInterface = { + if _backendInterface eq null then + // Enforce usage of FreshContext so we would be able to modify compilation unit between runs + val backendCtx = ctx match + case fc: FreshContext => fc + case ctx => ctx.fresh + _backendInterface = DottyBackendInterface(superCallsMap)(using backendCtx) + _backendInterface } - private var myPrimitives: DottyPrimitives = null - - override def run(using Context): Unit = - if myPrimitives == null then myPrimitives = new DottyPrimitives(ctx) - new GenBCodePipeline( - DottyBackendInterface(outputDir, superCallsMap), - myPrimitives - ).run(ctx.compilationUnit.tpdTree) - - - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - outputDir match - case jar: JarArchive => - updateJarManifestWithMainClass(jar, entryPoints.toList) - case _ => - try 
super.runOn(units) - finally outputDir match { - case jar: JarArchive => - if (ctx.run.nn.suspendedUnits.nonEmpty) - // If we close the jar the next run will not be able to write on the jar. - // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. - report.error("Can not suspend and output to a jar at the same time. See suspension with -Xprint-suspension.") - - jar.close() - case _ => - } + private var _codeGen: CodeGen = _ + def codeGen(using Context): CodeGen = { + if _codeGen eq null then + val int = backendInterface + val dottyPrimitives = new DottyPrimitives(ctx) + _codeGen = new CodeGen(int, dottyPrimitives)(bTypes.asInstanceOf[BTypesFromSymbols[int.type]]) + _codeGen } - private def updateJarManifestWithMainClass(jarArchive: JarArchive, entryPoints: List[String])(using Context): Unit = - val mainClass = Option.when(!ctx.settings.XmainClass.isDefault)(ctx.settings.XmainClass.value).orElse { - entryPoints match - case List(mainClass) => - Some(mainClass) - case Nil => - report.warning("No Main-Class designated or discovered.") - None - case mcs => - report.warning(s"No Main-Class due to multiple entry points:\n ${mcs.mkString("\n ")}") - None - } - mainClass.map { mc => - val manifest = Jar.WManifest() - manifest.mainClass = mc - val file = jarArchive.subdirectoryNamed("META-INF").fileNamed("MANIFEST.MF") - val os = file.output - manifest.underlying.write(os) - os.close() - } - end updateJarManifestWithMainClass -} - -object GenBCode { - val name: String = "genBCode" - val description: String = "generate JVM bytecode" -} - -class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrimitives)(using Context) extends BCodeSyncAndTry { - import DottyBackendInterface.symExtensions - - private var tree: Tree = _ - - private val sourceFile: SourceFile = ctx.compilationUnit.source - - /** Convert a `dotty.tools.io.AbstractFile` into a - * `dotty.tools.dotc.interfaces.AbstractFile`. 
- */ - private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = - new interfaces.AbstractFile { - override def name = absfile.name - override def path = absfile.path - override def jfile = Optional.ofNullable(absfile.file) - } - - final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit) - -// class BCodePhase() { - - private var bytecodeWriter : BytecodeWriter = null - private var mirrorCodeGen : JMirrorBuilder = null - - /* ---------------- q1 ---------------- */ - - case class Item1(arrivalPos: Int, cd: TypeDef, cunit: CompilationUnit) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } + private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = _ + def bTypes(using Context): BTypesFromSymbols[DottyBackendInterface] = { + if _bTypes eq null then + _bTypes = BTypesFromSymbols(backendInterface, frontendAccess) + _bTypes } - private val poison1 = Item1(Int.MaxValue, null, ctx.compilationUnit) - private val q1 = new java.util.LinkedList[Item1] - /* ---------------- q2 ---------------- */ - - case class SubItem2(classNode: asm.tree.ClassNode, - file: dotty.tools.io.AbstractFile) - - case class Item2(arrivalPos: Int, - mirror: SubItem2, - plain: SubItem2) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } + private var _frontendAccess: PostProcessorFrontendAccess | Null = _ + def frontendAccess(using Context): PostProcessorFrontendAccess = { + if _frontendAccess eq null then + _frontendAccess = PostProcessorFrontendAccess.Impl(backendInterface, entryPoints) + _frontendAccess.nn } - private val poison2 = Item2(Int.MaxValue, null, null) - private val q2 = new _root_.java.util.LinkedList[Item2] - - /* ---------------- q3 ---------------- */ - - /* - * An item of queue-3 (the last queue before serializing to disk) contains three of these - * (one for each of mirror and plain classes). 
- * - * @param jclassName internal name of the class - * @param jclassBytes bytecode emitted for the class SubItem3 represents - */ - case class SubItem3( - jclassName: String, - jclassBytes: Array[Byte], - jclassFile: dotty.tools.io.AbstractFile - ) - - case class Item3(arrivalPos: Int, - mirror: SubItem3, - plain: SubItem3) { - - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - private val i3comparator = new java.util.Comparator[Item3] { - override def compare(a: Item3, b: Item3) = { - if (a.arrivalPos < b.arrivalPos) -1 - else if (a.arrivalPos == b.arrivalPos) 0 - else 1 - } + private var _postProcessor: PostProcessor | Null = _ + def postProcessor(using Context): PostProcessor = { + if _postProcessor eq null then + _postProcessor = new PostProcessor(frontendAccess, bTypes) + _postProcessor.nn } - private val poison3 = Item3(Int.MaxValue, null, null) - private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator) - - /* - * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2 - */ - class Worker1(needsOutFolder: Boolean) { - - private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] - private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { - val lowerCaseName = javaClassName.toLowerCase - lowerCaseNames.get(lowerCaseName) match { - case None => - lowerCaseNames.put(lowerCaseName, classSymbol) - case Some(dupClassSym) => - // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting - val (cl1, cl2) = - if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) - else (dupClassSym, classSymbol) - val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString - atPhase(typerPhase) { - if (same) - report.warning( // FIXME: This should really be an error, but then FromTasty tests fail - s"${cl1.show} and ${cl2.showLocated} produce classes that 
overwrite one another", cl1.sourcePos) - else - report.warning(s"${cl1.show} differs only in case from ${cl2.showLocated}. " + - "Such classes will overwrite one another on case-insensitive filesystems.", cl1.sourcePos) - } - } - } - - def run(): Unit = { - while (true) { - val item = q1.poll - if (item.isPoison) { - q2 add poison2 - return - } - else { - try { /*withCurrentUnit(item.cunit)*/(visit(item)) } - catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.cunit.source.file.name}") - throw ex - } - } - } - } - - /* - * Checks for duplicate internal names case-insensitively, - * builds ASM ClassNodes for mirror and plain classes; - * enqueues them in queue-2. - * - */ - def visit(item: Item1): Boolean = { - val Item1(arrivalPos, cd, cunit) = item - val claszSymbol = cd.symbol - - // -------------- mirror class, if needed -------------- - val mirrorC = - if (claszSymbol.isTopLevelModuleClass) { - if (claszSymbol.companionClass == NoSymbol) { - mirrorCodeGen.genMirrorClass(claszSymbol, cunit) - } else { - report.log(s"No mirror class for module with linked class: ${claszSymbol.showFullName}") - null - } - } else null - - // -------------- "plain" class -------------- - val pcb = new PlainClassBuilder(cunit) - pcb.genPlainClass(cd) - val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName) else null; - val plainC = pcb.cnode - - if (claszSymbol.isClass) // @DarkDimius is this test needed here? 
- for (binary <- ctx.compilationUnit.pickled.get(claszSymbol.asClass)) { - val store = if (mirrorC ne null) mirrorC else plainC - val tasty = - val outTastyFile = getFileForClassfile(outF, store.name, ".tasty") - val outstream = new DataOutputStream(outTastyFile.bufferedOutput) - try outstream.write(binary()) - catch case ex: ClosedByInterruptException => - try - outTastyFile.delete() // don't leave an empty or half-written tastyfile around after an interrupt - catch - case _: Throwable => - throw ex - finally outstream.close() - - val uuid = new TastyHeaderUnpickler(binary()).readHeader() - val lo = uuid.getMostSignificantBits - val hi = uuid.getLeastSignificantBits - - // TASTY attribute is created but only the UUID bytes are stored in it. - // A TASTY attribute has length 16 if and only if the .tasty file exists. - val buffer = new TastyBuffer(16) - buffer.writeUncompressedLong(lo) - buffer.writeUncompressedLong(hi) - buffer.bytes - - val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) - store.visitAttribute(dataAttr) - } - - - // ----------- create files - - val classNodes = List(mirrorC, plainC) - val classFiles = classNodes.map(cls => - if (outF != null && cls != null) { - try { - checkForCaseConflict(cls.name, claszSymbol) - getFileForClassfile(outF, cls.name, ".class") - } catch { - case e: FileConflictException => - report.error(s"error writing ${cls.name}: ${e.getMessage}") - null - } - } else null - ) - - // ----------- compiler and sbt's callbacks - - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { - (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) - } - - for ((cls, clsFile) <- classNodes.zip(classFiles)) { - if (cls != null) { - val className = cls.name.replace('/', '.') - if (ctx.compilerCallback != null) - ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - if (ctx.sbtCallback != null) { - if (isLocal) - 
ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), clsFile.file) - else { - ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), clsFile.file, - className, fullClassName) - } - } - } - } - - // ----------- hand over to pipeline-2 - - val item2 = - Item2(arrivalPos, - SubItem2(mirrorC, classFiles(0)), - SubItem2(plainC, classFiles(1))) - - q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. - } // end of method visit(Item1) - - } // end of class BCodePhase.Worker1 - - /* - * Pipeline that takes ClassNodes from queue-2. The unit of work depends on the optimization level: - * - * (a) no optimization involves: - * - converting the plain ClassNode to byte array and placing it on queue-3 - */ - class Worker2 { - import bTypes.ClassBType - import bTypes.coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - // lazy val localOpt = new LocalOpt(new Settings()) - - private def localOptimizations(classNode: ClassNode): Unit = { - // BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode)) - } - - - /* Return an array of all serializable lambdas in this class */ - private def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { - val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] - for (m <- classNode.methods.asScala) { - val iter = m.instructions.iterator - while (iter.hasNext) { - val insn = iter.next() - insn match { - case indy: InvokeDynamicInsnNode - if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => - import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE - val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt - val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 - if isSerializable then - val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] - indyLambdaBodyMethods += implMethod - case _ => - } - } - } - indyLambdaBodyMethods.toArray - } - - /* - * Add: - * - * private static Object 
$deserializeLambda$(SerializedLambda l) { - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) - * catch { - * case i: IllegalArgumentException => - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) - * catch { - * case i: IllegalArgumentException => - * ... - * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) - * } - * - * We use invokedynamic here to enable caching within the deserializer without needing to - * host a static field in the enclosing class. This allows us to add this method to interfaces - * that define lambdas in default methods. - * - * SI-10232 we can't pass arbitrary number of method handles to the final varargs parameter of the bootstrap - * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target - * methods. - */ - private def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { - import asm.Opcodes._ - import bTypes._ - import coreBTypes._ - - val cw = classNode - - // Make sure to reference the ClassBTypes of all types that are used in the code generated - // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to - // `classBTypeFromInternalNameMap`. When writing the classfile, the asm ClassWriter computes - // stack map frames and invokes the `getCommonSuperClass` method. This method expects all - // ClassBTypes mentioned in the source code to exist in the map. 
- - val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor - - val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) - def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { - mv.visitVarInsn(ALOAD, 0) - mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) - } - - val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. See See MAX_MH_ARITY in CallSite.java - val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray - val numGroups = groups.length - - import scala.tools.asm.Label - val initialLabels = Array.fill(numGroups - 1)(new Label()) - val terminalLabel = new Label - def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) - - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) - } - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitLabel(label) - emitLambdaDeserializeIndy(groups(i).toIndexedSeq) - mv.visitInsn(ARETURN) - } - mv.visitLabel(terminalLabel) - emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) - mv.visitInsn(ARETURN) - } - - private def setInnerClasses(classNode: ClassNode): Unit = if (classNode != null) { - classNode.innerClasses.clear() - val (declared, referred) = collectNestedClasses(classNode) - addInnerClasses(classNode, declared, referred) - } - - /** - * Visit the class node and collect all referenced nested classes. 
- */ - private def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { - // type InternalName = String - val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { - def declaredNestedClasses(internalName: InternalName): List[ClassBType] = - bTypes.classBTypeFromInternalName(internalName).info.memberClasses - - def getClassIfNested(internalName: InternalName): Option[ClassBType] = { - val c = bTypes.classBTypeFromInternalName(internalName) - Option.when(c.isNestedClass)(c) - } - - def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { - // don't crash on invalid generic signatures - } - } - c.visit(classNode) - (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) - } - - def run(): Unit = { - while (true) { - val item = q2.poll - if (item.isPoison) { - q3 add poison3 - return - } - else { - try { - val plainNode = item.plain.classNode - localOptimizations(plainNode) - val serializableLambdas = collectSerializableLambdas(plainNode) - if (serializableLambdas.nonEmpty) - addLambdaDeserialize(plainNode, serializableLambdas) - setInnerClasses(plainNode) - setInnerClasses(item.mirror.classNode) - addToQ3(item) - } catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.plain.classNode.name}") - throw ex - } + override def run(using ctx: Context): Unit = + // CompilationUnit is the only component that will differ between each run invocation + // We need to update it to have correct source positions. + // FreshContext is always enforced when creating backend interface + backendInterface.ctx + .asInstanceOf[FreshContext] + .setCompilationUnit(ctx.compilationUnit) + val generated = codeGen.genUnit(ctx.compilationUnit) + // In Scala 2, the backend might use global optimizations which might delay post-processing to build the call graph. + // In Scala 3, we don't perform backend optimizations and always perform post-processing immediately. 
+ // https://github.com/scala/scala/pull/6057 + postProcessor.postProcessAndSendToDisk(generated) + (ctx.compilerCallback: CompilerCallback | Null) match { + case cb: CompilerCallback => cb.onSourceCompiled(ctx.source) + case null => () + } + + override def runOn(units: List[CompilationUnit])(using ctx:Context): List[CompilationUnit] = { + try super.runOn(units) + finally + // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized + if _frontendAccess ne null then + frontendAccess.compilerSettings.outputDirectory match { + case jar: JarArchive => + if (ctx.run.nn.suspendedUnits.nonEmpty) + // If we close the jar the next run will not be able to write on the jar. + // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. + report.error("Can not suspend and output to a jar at the same time. See suspension with -Xprint-suspension.") + + jar.close() + case _ => () } - } - } - - private def addToQ3(item: Item2) = { - - def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = { - val cw = new CClassWriter(extraProc) - cn.accept(cw) - cw.toByteArray - } - - val Item2(arrivalPos, SubItem2(mirror, mirrorFile), SubItem2(plain, plainFile)) = item - - val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror), mirrorFile) - val plainC = SubItem3(plain.name, getByteArray(plain), plainFile) - - if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { - if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) - AsmUtils.traceClass(plainC.jclassBytes) - } - - q3 add Item3(arrivalPos, mirrorC, plainC) - } - - } // end of class BCodePhase.Worker2 - - var arrivalPos: Int = 0 - - /* - * A run of the BCodePhase phase comprises: - * - * (a) set-up steps (most notably supporting maps in `BCodeTypes`, - * but also "the" writer where class files in byte-array form go) - * - * (b) building of ASM ClassNodes, their 
optimization and serialization. - * - * (c) tear down (closing the classfile-writer and clearing maps) - * - */ - def run(t: Tree)(using Context): Unit = { - this.tree = t - - // val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer) - - // val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer) - arrivalPos = 0 // just in case - // scalaPrimitives.init() - bTypes.intializeCoreBTypes() - // Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart) - - // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated. - bytecodeWriter = initBytecodeWriter() - mirrorCodeGen = new JMirrorBuilder - - val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter] - buildAndSendToDisk(needsOutfileForSymbol) - - // closing output files. - bytecodeWriter.close() - // Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart) - - if (ctx.compilerCallback != null) - ctx.compilerCallback.onSourceCompiled(sourceFile) - - /* TODO Bytecode can be verified (now that all classfiles have been written to disk) - * - * (1) asm.util.CheckAdapter.verify() - * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw) - * passing a custom ClassLoader to verify inter-dependent classes. - * Alternatively, - * - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool). - * - -Xverify:all - * - * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()` - * - */ + if _postProcessor ne null then + postProcessor.classfileWriter.close() } +} - /* - * Sequentially: - * (a) place all ClassDefs in queue-1 - * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2 - * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3 - * (d) serialize to disk by draining queue-3. 
- */ - private def buildAndSendToDisk(needsOutFolder: Boolean)(using Context) = { - try - feedPipeline1() - // val genStart = Statistics.startTimer(BackendStats.bcodeGenStat) - (new Worker1(needsOutFolder)).run() - // Statistics.stopTimer(BackendStats.bcodeGenStat, genStart) - - (new Worker2).run() - - // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) - drainQ3() - // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) - catch - case e: MethodTooLargeException => - val method = - s"${e.getClassName.replaceAll("/", ".")}.${e.getMethodName}" - val msg = - s"Generated bytecode for method '$method' is too large. Size: ${e.getCodeSize} bytes. Limit is 64KB" - report.error(msg) - case e: ClassTooLargeException => - val msg = - s"Class '${e.getClassName.replaceAll("/", ".")}' is too large. Constant pool size: ${e.getConstantPoolCount}. Limit is 64K entries" - report.error(msg) - - } - - /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */ - private def feedPipeline1() = { - def gen(tree: Tree): Unit = { - tree match { - case EmptyTree => () - case PackageDef(_, stats) => stats foreach gen - case ValDef(name, tpt, rhs) => () // module val not emitted - case cd: TypeDef => - q1 add Item1(arrivalPos, cd, int.ctx.compilationUnit) - arrivalPos += 1 - } - } - gen(tree) - q1 add poison1 - } - - /* Pipeline that writes classfile representations to disk. 
*/ - private def drainQ3() = { - - def sendToDisk(cfr: SubItem3): Unit = { - if (cfr != null){ - val SubItem3(jclassName, jclassBytes, jclassFile) = cfr - bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, jclassFile) - } - } - - var moreComing = true - // `expected` denotes the arrivalPos whose Item3 should be serialized next - var expected = 0 - - while (moreComing) { - val incoming = q3.poll - moreComing = !incoming.isPoison - if (moreComing) { - val item = incoming - sendToDisk(item.mirror) - sendToDisk(item.plain) - expected += 1 - } - } - - // we're done - assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1") - assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2") - assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3") - - } - //} // end of class BCodePhase +object GenBCode { + val name: String = "genBCode" + val description: String = "generate JVM bytecode" } diff --git a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala index e9e532933290..c16bc70fc3b0 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala @@ -185,13 +185,13 @@ abstract class GenericSignatureVisitor(nestedOnly: Boolean) { } // Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf -// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 +// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly) { - type InternalName = String + type InternalName = String def declaredNestedClasses(internalName: InternalName): 
List[T] def getClassIfNested(internalName: InternalName): Option[T] - + val declaredInnerClasses = mutable.Set.empty[T] val referredInnerClasses = mutable.Set.empty[T] diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala new file mode 100644 index 000000000000..606b5645aa24 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -0,0 +1,117 @@ +package dotty.tools.backend.jvm + +import scala.collection.mutable.ListBuffer +import dotty.tools.dotc.util.{SourcePosition, NoSourcePosition} +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.em +import scala.tools.asm.ClassWriter +import scala.tools.asm.tree.ClassNode + +/** + * Implements late stages of the backend that don't depend on a Global instance, i.e., + * optimizations, post-processing and classfile serialization and writing. + */ +class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: BTypes) { + self => + import bTypes.* + import frontendAccess.{backendReporting, compilerSettings} + import int.given + + val backendUtils = new BackendUtils(this) + val classfileWriter = ClassfileWriter(frontendAccess) + + def postProcessAndSendToDisk(generatedDefs: GeneratedDefs): Unit = { + val GeneratedDefs(classes, tasty) = generatedDefs + for (GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated) <- classes) { + val bytes = + try + if !isArtifact then setSerializableLambdas(classNode) + setInnerClasses(classNode) + serializeClass(classNode) + catch + case e: java.lang.RuntimeException if e.getMessage != null && e.getMessage.nn.contains("too large!") => + backendReporting.error(em"Could not write class ${classNode.name} because it exceeds JVM code size limits. 
${e.getMessage}") + null + case ex: Throwable => + ex.printStackTrace() + backendReporting.error(em"Error while emitting ${classNode.name}\n${ex.getMessage}") + null + + if (bytes != null) { + if (AsmUtils.traceSerializedClassEnabled && classNode.name.nn.contains(AsmUtils.traceSerializedClassPattern)) + AsmUtils.traceClass(bytes) + + val clsFile = classfileWriter.writeClass(classNode.name.nn, bytes, sourceFile) + if clsFile != null then onFileCreated(clsFile) + } + } + + for (GeneratedTasty(classNode, binaryGen) <- tasty){ + classfileWriter.writeTasty(classNode.name.nn, binaryGen()) + } + } + + private def setSerializableLambdas(classNode: ClassNode): Unit = { + import backendUtils.{collectSerializableLambdas, addLambdaDeserialize} + val serializableLambdas = collectSerializableLambdas(classNode) + if serializableLambdas.nonEmpty then + addLambdaDeserialize(classNode, serializableLambdas) + } + + private def setInnerClasses(classNode: ClassNode): Unit = { + import backendUtils.{collectNestedClasses, addInnerClasses} + classNode.innerClasses.nn.clear() + val (declared, referred) = collectNestedClasses(classNode) + addInnerClasses(classNode, declared, referred) + } + + def serializeClass(classNode: ClassNode): Array[Byte] = { + val cw = new ClassWriterWithBTypeLub(backendUtils.extraProc) + classNode.accept(cw) + cw.toByteArray.nn + } + + // ----------------------------------------------------------------------------------------- + // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) + // Background: + // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf + // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 + // https://github.com/scala/bug/issues/3872 + // ----------------------------------------------------------------------------------------- + + /* An `asm.ClassWriter` that uses `jvmWiseLUB()` + * The internal name of the least common ancestor of the types given by 
inameA and inameB. + * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow + */ + final class ClassWriterWithBTypeLub(flags: Int) extends ClassWriter(flags) { + + /** + * This method is used by asm when computing stack map frames. It is thread-safe: it depends + * only on the BTypes component, which does not depend on global. + * TODO @lry move to a different place where no global is in scope, on bTypes. + */ + override def getCommonSuperClass(inameA: String, inameB: String): String = { + // All types that appear in a class node need to have their ClassBType cached, see [[cachedClassBType]]. + val a = classBTypeFromInternalName(inameA) + val b = classBTypeFromInternalName(inameB) + val lub = a.jvmWiseLUB(b) + val lubName = lub.internalName + assert(lubName != "scala/Any") + lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. + } + } +} + +/** + * The result of code generation. [[isArtifact]] is `true` for mirror. 
+ */ +case class GeneratedClass(classNode: ClassNode, sourceFile: AbstractFile, isArtifact: Boolean, onFileCreated: AbstractFile => Unit) +case class GeneratedTasty(classNode: ClassNode, tastyGen: () => Array[Byte]) +case class GeneratedDefs(classes: List[GeneratedClass], tasty: List[GeneratedTasty]) + +// Temporary class, will be refactored in a future commit +trait ClassWriterForPostProcessor { + type InternalName = String + def write(bytes: Array[Byte], className: InternalName, sourceFile: AbstractFile): Unit +} diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala new file mode 100644 index 000000000000..80ee68bc94c3 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala @@ -0,0 +1,79 @@ +package dotty.tools.backend.jvm + +import scala.collection.mutable.{Clearable, HashSet} +import dotty.tools.dotc.util.* +import dotty.tools.dotc.reporting.Message +import dotty.tools.io.AbstractFile +import java.util.{Collection => JCollection, Map => JMap} +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.report +import dotty.tools.dotc.core.Phases + +/** + * Functionality needed in the post-processor whose implementation depends on the compiler + * frontend. All methods are synchronized. 
+ */ +sealed abstract class PostProcessorFrontendAccess { + import PostProcessorFrontendAccess._ + + def compilerSettings: CompilerSettings + def backendReporting: BackendReporting + def getEntryPoints: List[String] + + private val frontendLock: AnyRef = new Object() + inline final def frontendSynch[T](inline x: => T): T = frontendLock.synchronized(x) +} + +object PostProcessorFrontendAccess { + sealed trait CompilerSettings { + def debug: Boolean + def target: String // javaOutputVersion + + def dumpClassesDirectory: Option[String] + def outputDirectory: AbstractFile + + def mainClass: Option[String] + } + + sealed trait BackendReporting { + def error(message: Context ?=> Message): Unit + def warning(message: Context ?=> Message): Unit + def log(message: Context ?=> String): Unit + } + + class Impl[I <: DottyBackendInterface](val int: I, entryPoints: HashSet[String]) extends PostProcessorFrontendAccess { + import int.given + lazy val compilerSettings: CompilerSettings = buildCompilerSettings() + + private def buildCompilerSettings(): CompilerSettings = new CompilerSettings { + extension [T](s: dotty.tools.dotc.config.Settings.Setting[T]) + def valueSetByUser: Option[T] = + Option(s.value).filter(_ != s.default) + def s = ctx.settings + + lazy val target = + val releaseValue = Option(s.javaOutputVersion.value).filter(_.nonEmpty) + val targetValue = Option(s.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) + (releaseValue, targetValue) match + case (Some(release), None) => release + case (None, Some(target)) => target + case (Some(release), Some(_)) => + report.warning(s"The value of ${s.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") + release + case (None, None) => "8" // least supported version by default + + lazy val debug: Boolean = ctx.debug + lazy val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser + lazy val outputDirectory: AbstractFile = s.outputDir.value + lazy val mainClass: 
Option[String] = s.XmainClass.valueSetByUser + } + + object backendReporting extends BackendReporting { + def error(message: Context ?=> Message): Unit = frontendSynch(report.error(message)) + def warning(message: Context ?=> Message): Unit = frontendSynch(report.warning(message)) + def log(message: Context ?=> String): Unit = frontendSynch(report.log(message)) + } + + def getEntryPoints: List[String] = frontendSynch(entryPoints.toList) + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala index 2d4c3ce5c9c4..bc453aec17af 100644 --- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala +++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala @@ -8,6 +8,7 @@ import Contexts._ import Names.TermName, StdNames._ import Types.{JavaArrayType, UnspecifiedErrorType, Type} import Symbols.{Symbol, NoSymbol} +import Decorators.em import dotc.report import dotc.util.ReadOnlyMap @@ -66,7 +67,7 @@ class DottyPrimitives(ictx: Context) { case defn.ArrayOf(el) => el case JavaArrayType(el) => el case _ => - report.error(s"expected Array $tpe") + report.error(em"expected Array $tpe") UnspecifiedErrorType } @@ -133,7 +134,7 @@ class DottyPrimitives(ictx: Context) { def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { val alts = cls.info.member(method).alternatives.map(_.symbol) if (alts.isEmpty) - report.error(s"Unknown primitive method $cls.$method") + report.error(em"Unknown primitive method $cls.$method") else alts foreach (s => addPrimitive(s, s.info.paramInfoss match { diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 8ec19bb994b8..eee791852fde 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -125,7 +125,14 @@ class JSCodeGen()(using genCtx: Context) { /** Implicitly 
materializes the current local name generator. */ implicit def implicitLocalNames: LocalNameGenerator = localNames.get - private def currentClassType = encodeClassType(currentClassSym) + def currentThisType: jstpe.Type = { + encodeClassType(currentClassSym) match { + case tpe @ jstpe.ClassType(cls) => + jstpe.BoxedClassToPrimType.getOrElse(cls, tpe) + case tpe => + tpe + } + } /** Returns a new fresh local identifier. */ private def freshLocalIdent()(implicit pos: Position): js.LocalIdent = @@ -1023,7 +1030,7 @@ class JSCodeGen()(using genCtx: Context) { // Constructor of a non-native JS class ------------------------------------ def genJSClassCapturesAndConstructor(constructorTrees: List[DefDef])( - implicit pos: SourcePosition): (List[js.ParamDef], js.JSMethodDef) = { + implicit pos: SourcePosition): (List[js.ParamDef], js.JSConstructorDef) = { /* We need to merge all Scala constructors into a single one because the * IR, like JavaScript, only allows a single one. * @@ -1095,20 +1102,21 @@ class JSCodeGen()(using genCtx: Context) { (exports.result(), jsClassCaptures.result()) } + // The name 'constructor' is used for error reporting here val (formalArgs, restParam, overloadDispatchBody) = jsExportsGen.genOverloadDispatch(JSName.Literal("constructor"), exports, jstpe.IntType) val overloadVar = js.VarDef(freshLocalIdent("overload"), NoOriginalName, jstpe.IntType, mutable = false, overloadDispatchBody) - val ctorStats = genJSClassCtorStats(overloadVar.ref, ctorTree) - - val constructorBody = js.Block( - paramVarDefs ::: List(overloadVar, ctorStats, js.Undefined())) + val constructorBody = wrapJSCtorBody( + paramVarDefs :+ overloadVar, + genJSClassCtorBody(overloadVar.ref, ctorTree), + js.Undefined() :: Nil + ) - val constructorDef = js.JSMethodDef( - js.MemberFlags.empty, - js.StringLiteral("constructor"), + val constructorDef = js.JSConstructorDef( + js.MemberFlags.empty.withNamespace(js.MemberNamespace.Constructor), formalArgs, restParam, 
constructorBody)(OptimizerHints.empty, None) (jsClassCaptures, constructorDef) @@ -1150,7 +1158,8 @@ class JSCodeGen()(using genCtx: Context) { assert(jsSuperCall.isDefined, s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") - new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), jsSuperCall.get :: jsStats.result()) + new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), + js.JSConstructorBody(Nil, jsSuperCall.get, jsStats.result())(dd.span)) } private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { @@ -1251,9 +1260,9 @@ class JSCodeGen()(using genCtx: Context) { (jsExport, jsClassCaptures) } - /** generates a sequence of JS constructor statements based on a constructor tree. */ - private def genJSClassCtorStats(overloadVar: js.VarRef, - ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.Tree = { + /** Generates a JS constructor body based on a constructor tree. */ + private def genJSClassCtorBody(overloadVar: js.VarRef, + ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.JSConstructorBody = { /* generates a statement that conditionally executes body iff the chosen * overload is any of the descendants of `tree` (including itself). 
@@ -1348,13 +1357,19 @@ class JSCodeGen()(using genCtx: Context) { val primaryCtor = ctorTree.ctor val secondaryCtorTrees = ctorTree.subCtors - js.Block( - secondaryCtorTrees.map(preStats(_, primaryCtor.paramsAndInfo)) ++ - primaryCtor.body ++ + wrapJSCtorBody( + secondaryCtorTrees.map(preStats(_, primaryCtor.paramsAndInfo)), + primaryCtor.body, secondaryCtorTrees.map(postStats(_)) ) } + private def wrapJSCtorBody(before: List[js.Tree], body: js.JSConstructorBody, + after: List[js.Tree]): js.JSConstructorBody = { + js.JSConstructorBody(before ::: body.beforeSuper, body.superCall, + body.afterSuper ::: after)(body.pos) + } + private sealed trait JSCtor { val sym: Symbol val paramsAndInfo: List[(Symbol, JSParamInfo)] @@ -1362,7 +1377,7 @@ class JSCodeGen()(using genCtx: Context) { private class PrimaryJSCtor(val sym: Symbol, val paramsAndInfo: List[(Symbol, JSParamInfo)], - val body: List[js.Tree]) extends JSCtor + val body: js.JSConstructorBody) extends JSCtor private class SplitSecondaryJSCtor(val sym: Symbol, val paramsAndInfo: List[(Symbol, JSParamInfo)], @@ -1945,9 +1960,9 @@ class JSCodeGen()(using genCtx: Context) { }*/ thisLocalVarIdent.fold[js.Tree] { - js.This()(currentClassType) + js.This()(currentThisType) } { thisLocalIdent => - js.VarRef(thisLocalIdent)(currentClassType) + js.VarRef(thisLocalIdent)(currentThisType) } } @@ -2014,9 +2029,7 @@ class JSCodeGen()(using genCtx: Context) { val (exceptValDef, exceptVar) = if (mightCatchJavaScriptException) { val valDef = js.VarDef(freshLocalIdent("e"), NoOriginalName, - encodeClassType(defn.ThrowableClass), mutable = false, { - genModuleApplyMethod(jsdefn.Runtime_wrapJavaScriptException, origExceptVar :: Nil) - }) + encodeClassType(defn.ThrowableClass), mutable = false, js.WrapAsThrowable(origExceptVar)) (valDef, valDef.ref) } else { (js.Skip(), origExceptVar) @@ -2307,7 +2320,7 @@ class JSCodeGen()(using genCtx: Context) { val privateFieldDefs = mutable.ListBuffer.empty[js.FieldDef] val classDefMembers = 
mutable.ListBuffer.empty[js.MemberDef] val instanceMembers = mutable.ListBuffer.empty[js.MemberDef] - var constructor: Option[js.JSMethodDef] = None + var constructor: Option[js.JSConstructorDef] = None originalClassDef.memberDefs.foreach { case fdef: js.FieldDef => @@ -2321,17 +2334,13 @@ class JSCodeGen()(using genCtx: Context) { "Non-static, unexported method in non-native JS class") classDefMembers += mdef - case mdef: js.JSMethodDef => - mdef.name match { - case js.StringLiteral("constructor") => - assert(!mdef.flags.namespace.isStatic, "Exported static method") - assert(constructor.isEmpty, "two ctors in class") - constructor = Some(mdef) + case cdef: js.JSConstructorDef => + assert(constructor.isEmpty, "two ctors in class") + constructor = Some(cdef) - case _ => - assert(!mdef.flags.namespace.isStatic, "Exported static method") - instanceMembers += mdef - } + case mdef: js.JSMethodDef => + assert(!mdef.flags.namespace.isStatic, "Exported static method") + instanceMembers += mdef case property: js.JSPropertyDef => instanceMembers += property @@ -2361,7 +2370,7 @@ class JSCodeGen()(using genCtx: Context) { val jsClassCaptures = originalClassDef.jsClassCaptures.getOrElse { throw new AssertionError(s"no class captures for anonymous JS class at $pos") } - val js.JSMethodDef(_, _, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { + val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { throw new AssertionError("No ctor found") } assert(ctorParams.isEmpty && ctorRestParam.isEmpty, @@ -2396,6 +2405,9 @@ class JSCodeGen()(using genCtx: Context) { case mdef: js.MethodDef => throw new AssertionError("unexpected MethodDef") + case cdef: js.JSConstructorDef => + throw new AssertionError("unexpected JSConstructorDef") + case mdef: js.JSMethodDef => implicit val pos = mdef.pos val impl = memberLambda(mdef.args, mdef.restParam, mdef.body) @@ -2468,36 +2480,43 @@ class JSCodeGen()(using genCtx: Context) { } // Transform the 
constructor body. - val inlinedCtorStats = new ir.Transformers.Transformer { - override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match { - // The super constructor call. Transform this into a simple new call. - case js.JSSuperConstructorCall(args) => - implicit val pos = tree.pos - - val newTree = { - val ident = originalClassDef.superClass.getOrElse(throw new FatalError("No superclass")) - if (args.isEmpty && ident.name == JSObjectClassName) - js.JSObjectConstr(Nil) - else - js.JSNew(jsSuperClassRef, args) - } - - js.Block( - js.VarDef(selfName, thisOriginalName, jstpe.AnyType, mutable = false, newTree) :: - memberDefinitions) + val inlinedCtorStats: List[js.Tree] = { + val beforeSuper = ctorBody.beforeSuper - case js.This() => - selfRef(tree.pos) + val superCall = { + implicit val pos = ctorBody.superCall.pos + val js.JSSuperConstructorCall(args) = ctorBody.superCall - // Don't traverse closure boundaries - case closure: js.Closure => - val newCaptureValues = closure.captureValues.map(transformExpr) - closure.copy(captureValues = newCaptureValues)(closure.pos) + val newTree = { + val ident = originalClassDef.superClass.getOrElse(throw new FatalError("No superclass")) + if (args.isEmpty && ident.name == JSObjectClassName) + js.JSObjectConstr(Nil) + else + js.JSNew(jsSuperClassRef, args) + } - case tree => - super.transform(tree, isStat) + val selfVarDef = js.VarDef(selfName, thisOriginalName, jstpe.AnyType, mutable = false, newTree) + selfVarDef :: memberDefinitions } - }.transform(ctorBody, isStat = true) + + // After the super call, substitute `selfRef` for `This()` + val afterSuper = new ir.Transformers.Transformer { + override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match { + case js.This() => + selfRef(tree.pos) + + // Don't traverse closure boundaries + case closure: js.Closure => + val newCaptureValues = closure.captureValues.map(transformExpr) + closure.copy(captureValues = newCaptureValues)(closure.pos) + + case 
tree => + super.transform(tree, isStat) + } + }.transformStats(ctorBody.afterSuper) + + beforeSuper ::: superCall ::: afterSuper + } val closure = js.Closure(arrow = true, jsClassCaptures, Nil, None, js.Block(inlinedCtorStats, selfRef), jsSuperClassValue :: args) @@ -2926,7 +2945,7 @@ class JSCodeGen()(using genCtx: Context) { case defn.ArrayOf(el) => el case JavaArrayType(el) => el case tpe => - val msg = ex"expected Array $tpe" + val msg = em"expected Array $tpe" report.error(msg) ErrorType(msg) } @@ -2989,14 +3008,12 @@ class JSCodeGen()(using genCtx: Context) { implicit val pos: SourcePosition = tree.sourcePos val exception = args.head val genException = genExpr(exception) - js.Throw { - if (exception.tpe.typeSymbol.derivesFrom(jsdefn.JavaScriptExceptionClass)) { - genModuleApplyMethod( - jsdefn.Runtime_unwrapJavaScriptException, - List(genException)) - } else { - genException - } + genException match { + case js.New(cls, _, _) if cls != JavaScriptExceptionClassName => + // Common case where ex is neither null nor a js.JavaScriptException + js.Throw(genException) + case _ => + js.Throw(js.UnwrapFromThrowable(genException)) } } @@ -3144,7 +3161,23 @@ class JSCodeGen()(using genCtx: Context) { val tpe = atPhase(elimErasedValueTypePhase) { sym.info.finalResultType } - unbox(boxedResult, tpe) + if (tpe.isRef(defn.BoxedUnitClass) && sym.isGetter) { + /* Work around to reclaim Scala 2 erasure behavior, assumed by the test + * NonNativeJSTypeTest.defaultValuesForFields. + * Scala 2 erases getters of `Unit`-typed fields as returning `Unit` + * (not `BoxedUnit`). Therefore, when called in expression position, + * the call site introduces an explicit `BoxedUnit.UNIT`. Even if the + * field has not been initialized at all (with `= _`), this results in + * an actual `()` value. + * In Scala 3, the same pattern returns `null`, as a `BoxedUnit`, so we + * introduce here an explicit `()` value. 
+ * TODO We should remove this branch if the upstream test is updated + * not to assume such a strict interpretation of erasure. + */ + js.Block(boxedResult, js.Undefined()) + } else { + unbox(boxedResult, tpe) + } } } @@ -3499,13 +3532,16 @@ class JSCodeGen()(using genCtx: Context) { val closure = js.Closure(arrow = true, formalCaptures, formalParams, restParam, genBody, actualCaptures) if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) { - assert(!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym), - s"Invalid functional interface $funInterfaceSym reached the back-end") val formalCount = formalParams.size val cls = ClassName("scala.scalajs.runtime.AnonFunction" + formalCount) val ctorName = MethodName.constructor( jstpe.ClassRef(ClassName("scala.scalajs.js.Function" + formalCount)) :: Nil) js.New(cls, js.MethodIdent(ctorName), List(closure)) + } else if (funInterfaceSym.name == tpnme.FunctionXXL && funInterfaceSym.owner == defn.ScalaRuntimePackageClass) { + val cls = ClassName("scala.scalajs.runtime.AnonFunctionXXL") + val ctorName = MethodName.constructor( + jstpe.ClassRef(ClassName("scala.scalajs.js.Function1")) :: Nil) + js.New(cls, js.MethodIdent(ctorName), List(closure)) } else { assert(funInterfaceSym.isJSType, s"Invalid functional interface $funInterfaceSym reached the back-end") @@ -3636,7 +3672,7 @@ class JSCodeGen()(using genCtx: Context) { } else if (sym.isJSType) { if (sym.is(Trait)) { report.error( - s"isInstanceOf[${sym.fullName}] not supported because it is a JS trait", + em"isInstanceOf[${sym.fullName}] not supported because it is a JS trait", pos) js.BooleanLiteral(true) } else { @@ -3966,6 +4002,53 @@ class JSCodeGen()(using genCtx: Context) { js.JSFunctionApply(fVarDef.ref, List(keyVarRef)) })) + case JS_THROW => + // js.special.throw(arg) + js.Throw(genArgs1) + + case JS_TRY_CATCH => + /* js.special.tryCatch(arg1, arg2) + * + * We must generate: + * + * val body = arg1 + * val handler = arg2 + * try { + * body() 
+ * } catch (e) { + * handler(e) + * } + * + * with temporary vals, because `arg2` must be evaluated before + * `body` executes. Moreover, exceptions thrown while evaluating + * the function values `arg1` and `arg2` must not be caught. + */ + val (arg1, arg2) = genArgs2 + val bodyVarDef = js.VarDef(freshLocalIdent("body"), NoOriginalName, + jstpe.AnyType, mutable = false, arg1) + val handlerVarDef = js.VarDef(freshLocalIdent("handler"), NoOriginalName, + jstpe.AnyType, mutable = false, arg2) + val exceptionVarIdent = freshLocalIdent("e") + val exceptionVarRef = js.VarRef(exceptionVarIdent)(jstpe.AnyType) + js.Block( + bodyVarDef, + handlerVarDef, + js.TryCatch( + js.JSFunctionApply(bodyVarDef.ref, Nil), + exceptionVarIdent, + NoOriginalName, + js.JSFunctionApply(handlerVarDef.ref, List(exceptionVarRef)) + )(jstpe.AnyType) + ) + + case WRAP_AS_THROWABLE => + // js.special.wrapAsThrowable(arg) + js.WrapAsThrowable(genArgs1) + + case UNWRAP_FROM_THROWABLE => + // js.special.unwrapFromThrowable(arg) + js.UnwrapFromThrowable(genArgs1) + case UNION_FROM | UNION_FROM_TYPE_CONSTRUCTOR => /* js.|.from and js.|.fromTypeConstructor * We should not have to deal with those. 
They have a perfectly valid @@ -4748,6 +4831,7 @@ object JSCodeGen { private val NullPointerExceptionClass = ClassName("java.lang.NullPointerException") private val JSObjectClassName = ClassName("scala.scalajs.js.Object") + private val JavaScriptExceptionClassName = ClassName("scala.scalajs.js.JavaScriptException") private val ObjectClassRef = jstpe.ClassRef(ir.Names.ObjectClass) diff --git a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala index c252ac892548..5336d60129ac 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala @@ -162,10 +162,6 @@ final class JSDefinitions()(using Context) { @threadUnsafe lazy val RuntimePackageVal = requiredPackage("scala.scalajs.runtime") @threadUnsafe lazy val RuntimePackageClass = RuntimePackageVal.moduleClass.asClass - @threadUnsafe lazy val RuntimePackage_wrapJavaScriptExceptionR = RuntimePackageClass.requiredMethodRef("wrapJavaScriptException") - def Runtime_wrapJavaScriptException(using Context) = RuntimePackage_wrapJavaScriptExceptionR.symbol - @threadUnsafe lazy val Runtime_unwrapJavaScriptExceptionR = RuntimePackageClass.requiredMethodRef("unwrapJavaScriptException") - def Runtime_unwrapJavaScriptException(using Context) = Runtime_unwrapJavaScriptExceptionR.symbol @threadUnsafe lazy val Runtime_toScalaVarArgsR = RuntimePackageClass.requiredMethodRef("toScalaVarArgs") def Runtime_toScalaVarArgs(using Context) = Runtime_toScalaVarArgsR.symbol @threadUnsafe lazy val Runtime_toJSVarArgsR = RuntimePackageClass.requiredMethodRef("toJSVarArgs") @@ -206,6 +202,14 @@ final class JSDefinitions()(using Context) { def Special_instanceof(using Context) = Special_instanceofR.symbol @threadUnsafe lazy val Special_strictEqualsR = SpecialPackageClass.requiredMethodRef("strictEquals") def Special_strictEquals(using Context) = Special_strictEqualsR.symbol + @threadUnsafe lazy val Special_throwR = 
SpecialPackageClass.requiredMethodRef("throw") + def Special_throw(using Context) = Special_throwR.symbol + @threadUnsafe lazy val Special_tryCatchR = SpecialPackageClass.requiredMethodRef("tryCatch") + def Special_tryCatch(using Context) = Special_tryCatchR.symbol + @threadUnsafe lazy val Special_wrapAsThrowableR = SpecialPackageClass.requiredMethodRef("wrapAsThrowable") + def Special_wrapAsThrowable(using Context) = Special_wrapAsThrowableR.symbol + @threadUnsafe lazy val Special_unwrapFromThrowableR = SpecialPackageClass.requiredMethodRef("unwrapFromThrowable") + def Special_unwrapFromThrowable(using Context) = Special_unwrapFromThrowableR.symbol @threadUnsafe lazy val WrappedArrayType: TypeRef = requiredClassRef("scala.scalajs.js.WrappedArray") def WrappedArrayClass(using Context) = WrappedArrayType.symbol.asClass diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index 0884ec19b53e..78412999bb34 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -135,8 +135,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { for ((info, _) <- tups.tail) { report.error( - em"export overload conflicts with export of $firstSym: " + - "a field may not share its exported name with another export", + em"export overload conflicts with export of $firstSym: a field may not share its exported name with another export", info.pos) } @@ -264,8 +263,8 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { .alternatives assert(!alts.isEmpty, - em"Ended up with no alternatives for ${classSym.fullName}::$name. " + - em"Original set was ${alts} with types ${alts.map(_.info)}") + em"""Ended up with no alternatives for ${classSym.fullName}::$name. 
+ |Original set was ${alts} with types ${alts.map(_.info)}""") val (jsName, isProp) = exportNameInfo(name) @@ -309,7 +308,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { if (isProp && methodSyms.nonEmpty) { val firstAlt = alts.head report.error( - i"Conflicting properties and methods for ${classSym.fullName}::$name.", + em"Conflicting properties and methods for ${classSym.fullName}::$name.", firstAlt.srcPos) implicit val pos = firstAlt.span js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None) @@ -613,7 +612,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { val altsTypesInfo = alts.map(_.info.show).sorted.mkString("\n ") report.error( - s"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", + em"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", pos) } @@ -650,7 +649,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.LoadJSConstructor(encodeClassName(superClassSym)) } - val receiver = js.This()(jstpe.AnyType) + val receiver = js.This()(currentThisType) val nameTree = genExpr(sym.jsName) if (sym.isJSGetter) { @@ -754,7 +753,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { genApplyMethodMaybeStatically(receiver, modAccessor, Nil) } } else { - js.This()(encodeClassType(targetSym)) + js.This()(currentThisType) } } @@ -811,7 +810,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { def receiver = if (static) genLoadModule(sym.owner) - else js.This()(encodeClassType(currentClass)) + else js.This()(currentThisType) def boxIfNeeded(call: js.Tree): js.Tree = box(call, atPhase(elimErasedValueTypePhase)(sym.info.resultType)) diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala index 9b19e66058e8..2fd007165952 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala +++ 
b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala @@ -6,6 +6,7 @@ import java.net.{URI, URISyntaxException} import dotty.tools.dotc.core._ import Contexts._ +import Decorators.em import dotty.tools.dotc.report @@ -31,7 +32,7 @@ class JSPositions()(using Context) { URIMap(from, to) :: Nil } catch { case e: URISyntaxException => - report.error(s"${e.getInput} is not a valid URI") + report.error(em"${e.getInput} is not a valid URI") Nil } } diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala index 6b3854ed677f..029273aed54b 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala @@ -5,6 +5,7 @@ import Names.TermName import Types._ import Contexts._ import Symbols._ +import Decorators.em import dotty.tools.dotc.ast.tpd._ import dotty.tools.backend.jvm.DottyPrimitives @@ -36,12 +37,16 @@ object JSPrimitives { inline val LINKING_INFO = WITH_CONTEXTUAL_JS_CLASS_VALUE + 1 // runtime.linkingInfo inline val DYNAMIC_IMPORT = LINKING_INFO + 1 // runtime.dynamicImport - inline val STRICT_EQ = DYNAMIC_IMPORT + 1 // js.special.strictEquals - inline val IN = STRICT_EQ + 1 // js.special.in - inline val INSTANCEOF = IN + 1 // js.special.instanceof - inline val DELETE = INSTANCEOF + 1 // js.special.delete - inline val FORIN = DELETE + 1 // js.special.forin - inline val DEBUGGER = FORIN + 1 // js.special.debugger + inline val STRICT_EQ = DYNAMIC_IMPORT + 1 // js.special.strictEquals + inline val IN = STRICT_EQ + 1 // js.special.in + inline val INSTANCEOF = IN + 1 // js.special.instanceof + inline val DELETE = INSTANCEOF + 1 // js.special.delete + inline val FORIN = DELETE + 1 // js.special.forin + inline val JS_THROW = FORIN + 1 // js.special.throw + inline val JS_TRY_CATCH = JS_THROW + 1 // js.special.tryCatch + inline val WRAP_AS_THROWABLE = JS_TRY_CATCH + 1 // js.special.wrapAsThrowable + inline val UNWRAP_FROM_THROWABLE = 
WRAP_AS_THROWABLE + 1 // js.special.unwrapFromThrowable + inline val DEBUGGER = UNWRAP_FROM_THROWABLE + 1 // js.special.debugger inline val THROW = DEBUGGER + 1 @@ -90,7 +95,7 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { val alts = cls.info.member(method).alternatives.map(_.symbol) if (alts.isEmpty) { - report.error(s"Unknown primitive method $cls.$method") + report.error(em"Unknown primitive method $cls.$method") } else { for (s <- alts) addPrimitive(s, code) @@ -125,6 +130,10 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { addPrimitive(jsdefn.Special_instanceof, INSTANCEOF) addPrimitive(jsdefn.Special_delete, DELETE) addPrimitive(jsdefn.Special_forin, FORIN) + addPrimitive(jsdefn.Special_throw, JS_THROW) + addPrimitive(jsdefn.Special_tryCatch, JS_TRY_CATCH) + addPrimitive(jsdefn.Special_wrapAsThrowable, WRAP_AS_THROWABLE) + addPrimitive(jsdefn.Special_unwrapFromThrowable, UNWRAP_FROM_THROWABLE) addPrimitive(jsdefn.Special_debugger, DEBUGGER) addPrimitive(defn.throwMethod, THROW) diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala index c9c032b0ae7d..5f5e9fc799b5 100644 --- a/compiler/src/dotty/tools/dotc/Bench.scala +++ b/compiler/src/dotty/tools/dotc/Bench.scala @@ -14,24 +14,22 @@ import scala.annotation.internal.sharable object Bench extends Driver: @sharable private var numRuns = 1 - - private def ntimes(n: Int)(op: => Reporter): Reporter = - (0 until n).foldLeft(emptyReporter)((_, _) => op) - + @sharable private var numCompilers = 1 + @sharable private var waitAfter = -1 + @sharable private var curCompiler = 0 @sharable private var times: Array[Int] = _ override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = - times = new Array[Int](numRuns) var reporter: Reporter = emptyReporter for i <- 0 until numRuns do + val curRun = curCompiler * numRuns 
+ i val start = System.nanoTime() reporter = super.doCompile(compiler, files) - times(i) = ((System.nanoTime - start) / 1000000).toInt - println(s"time elapsed: ${times(i)}ms") - if ctx.settings.Xprompt.value then + times(curRun) = ((System.nanoTime - start) / 1000000).toInt + println(s"time elapsed: ${times(curRun)}ms") + if ctx.settings.Xprompt.value || waitAfter == curRun + 1 then print("hit to continue >") System.in.nn.read() - println() reporter def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { @@ -42,20 +40,26 @@ object Bench extends Driver: def reportTimes() = val best = times.sorted - val measured = numRuns / 3 + val measured = numCompilers * numRuns / 3 val avgBest = best.take(measured).sum / measured val avgLast = times.reverse.take(measured).sum / measured - println(s"best out of $numRuns runs: ${best(0)}") + println(s"best out of ${numCompilers * numRuns} runs: ${best(0)}") println(s"average out of best $measured: $avgBest") println(s"average out of last $measured: $avgLast") - override def process(args: Array[String], rootCtx: Context): Reporter = + override def process(args: Array[String]): Reporter = val (numCompilers, args1) = extractNumArg(args, "#compilers") val (numRuns, args2) = extractNumArg(args1, "#runs") + val (waitAfter, args3) = extractNumArg(args2, "#wait-after", -1) + this.numCompilers = numCompilers this.numRuns = numRuns + this.waitAfter = waitAfter + this.times = new Array[Int](numCompilers * numRuns) var reporter: Reporter = emptyReporter - for i <- 0 until numCompilers do - reporter = super.process(args2, rootCtx) + curCompiler = 0 + while curCompiler < numCompilers do + reporter = super.process(args3) + curCompiler += 1 reportTimes() reporter diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index a6069e2749a9..046b649941b1 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ 
b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -9,11 +9,15 @@ import util.{FreshNameCreator, SourceFile, NoSource} import util.Spans.Span import ast.{tpd, untpd} import tpd.{Tree, TreeTraverser} +import ast.Trees.{Import, Ident} import typer.Nullables import transform.SymUtils._ import core.Decorators._ -import config.SourceVersion +import config.{SourceVersion, Feature} +import StdNames.nme import scala.annotation.internal.sharable +import scala.util.control.NoStackTrace +import transform.MacroAnnotations class CompilationUnit protected (val source: SourceFile) { @@ -43,6 +47,8 @@ class CompilationUnit protected (val source: SourceFile) { */ var needsInlining: Boolean = false + var hasMacroAnnotations: Boolean = false + /** Set to `true` if inliner added anonymous mirrors that need to be completed */ var needsMirrorSupport: Boolean = false @@ -51,6 +57,12 @@ class CompilationUnit protected (val source: SourceFile) { */ var needsStaging: Boolean = false + /** Will be set to true if the unit contains a captureChecking language import */ + var needsCaptureChecking: Boolean = false + + /** Will be set to true if the unit contains a pureFunctions language import */ + var knowsPureFuns: Boolean = false + var suspended: Boolean = false var suspendedAtInliningPhase: Boolean = false @@ -94,7 +106,7 @@ class CompilationUnit protected (val source: SourceFile) { object CompilationUnit { - class SuspendException extends Exception + class SuspendException extends Exception with NoStackTrace /** Make a compilation unit for top class `clsd` with the contents of the `unpickled` tree */ def apply(clsd: ClassDenotation, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = @@ -111,6 +123,7 @@ object CompilationUnit { force.traverse(unit1.tpdTree) unit1.needsStaging = force.containsQuote unit1.needsInlining = force.containsInline + unit1.hasMacroAnnotations = force.containsMacroAnnotation } unit1 } @@ -123,11 +136,11 @@ object CompilationUnit { if 
(!mustExist) source else if (source.file.isDirectory) { - report.error(s"expected file, received directory '${source.file.path}'") + report.error(em"expected file, received directory '${source.file.path}'") NoSource } else if (!source.file.exists) { - report.error(s"source file not found: ${source.file.path}") + report.error(em"source file not found: ${source.file.path}") NoSource } else source @@ -138,11 +151,24 @@ object CompilationUnit { private class Force extends TreeTraverser { var containsQuote = false var containsInline = false + var containsCaptureChecking = false + var containsMacroAnnotation = false def traverse(tree: Tree)(using Context): Unit = { if (tree.symbol.isQuote) containsQuote = true if tree.symbol.is(Flags.Inline) then containsInline = true + tree match + case Import(qual, selectors) => + tpd.languageImport(qual) match + case Some(prefix) => + for case untpd.ImportSelector(untpd.Ident(imported), untpd.EmptyTree, _) <- selectors do + Feature.handleGlobalLanguageImport(prefix, imported) + case _ => + case _ => + for annot <- tree.symbol.annotations do + if MacroAnnotations.isMacroAnnotation(annot) then + ctx.compilationUnit.hasMacroAnnotations = true traverseChildren(tree) } } diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 46d36c4412c7..db3d42d32190 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -35,6 +35,7 @@ class Compiler { protected def frontendPhases: List[List[Phase]] = List(new Parser) :: // Compiler frontend: scanner, parser List(new TyperPhase) :: // Compiler frontend: namer, typer + List(new CheckUnused) :: // Check for unused elements List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files @@ -81,13 +82,14 @@ class Compiler { new 
PatternMatcher) :: // Compile pattern matches List(new TestRecheck.Pre) :: // Test only: run rechecker, enabled under -Yrecheck-test List(new TestRecheck) :: // Test only: run rechecker, enabled under -Yrecheck-test - List(new CheckCaptures.Pre) :: // Preparations for check captures phase, enabled under -Ycc - List(new CheckCaptures) :: // Check captures, enabled under -Ycc + List(new CheckCaptures.Pre) :: // Preparations for check captures phase, enabled under captureChecking + List(new CheckCaptures) :: // Check captures, enabled under captureChecking List(new ElimOpaque, // Turn opaque into normal aliases new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) new ExplicitOuter, // Add accessors to outer classes from nested ones. new ExplicitSelf, // Make references to non-trivial self types explicit as casts - new StringInterpolatorOpt) :: // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats + new StringInterpolatorOpt, // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats + new DropBreaks) :: // Optimize local Break throws by rewriting them List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` new InlinePatterns, // Remove placeholders of inlined patterns diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 14a71463c66d..e548cae55ddd 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -30,18 +30,20 @@ class Driver { protected def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = if files.nonEmpty then + var runOrNull = ctx.run try val run = compiler.newRun + runOrNull = run run.compile(files) finish(compiler, run) catch case ex: FatalError => report.error(ex.getMessage.nn) // signals that 
we should fail compilation. - case ex: TypeError => - println(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}") + case ex: TypeError if !runOrNull.enrichedErrorMessage => + println(runOrNull.enrichErrorMessage(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}")) throw ex - case ex: Throwable => - println(s"$ex while compiling ${files.map(_.path).mkString(", ")}") + case ex: Throwable if !runOrNull.enrichedErrorMessage => + println(runOrNull.enrichErrorMessage(s"Exception while compiling ${files.map(_.path).mkString(", ")}")) throw ex ctx.reporter @@ -94,7 +96,7 @@ class Driver { val newEntries: List[String] = files .flatMap { file => if !file.exists then - report.error(s"File does not exist: ${file.path}") + report.error(em"File does not exist: ${file.path}") None else file.extension match case "jar" => Some(file.path) @@ -102,10 +104,10 @@ class Driver { TastyFileUtil.getClassPath(file) match case Some(classpath) => Some(classpath) case _ => - report.error(s"Could not load classname from: ${file.path}") + report.error(em"Could not load classname from: ${file.path}") None case _ => - report.error(s"File extension is not `tasty` or `jar`: ${file.path}") + report.error(em"File extension is not `tasty` or `jar`: ${file.path}") None } .distinct @@ -171,7 +173,7 @@ class Driver { * the other overloads without worrying about breaking compatibility * with sbt. */ - final def process(args: Array[String]): Reporter = + def process(args: Array[String]): Reporter = process(args, null: Reporter | Null, null: interfaces.CompilerCallback | Null) /** Entry point to the compiler using a custom `Context`. 
diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index f9152e8294c6..944ae794c94f 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -163,15 +163,24 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** Actions that need to be performed at the end of the current compilation run */ private var finalizeActions = mutable.ListBuffer[() => Unit]() + /** Will be set to true if any of the compiled compilation units contains + * a pureFunctions language import. + */ + var pureFunsImportEncountered = false + + /** Will be set to true if any of the compiled compilation units contains + * a captureChecking language import. + */ + var ccImportEncountered = false + + private var myEnrichedErrorMessage = false + def compile(files: List[AbstractFile]): Unit = - try - val sources = files.map(runContext.getSource(_)) - compileSources(sources) - catch - case NonFatal(ex) => - if units.nonEmpty then report.echo(i"exception occurred while compiling $units%, %") - else report.echo(s"exception occurred while compiling ${files.map(_.name).mkString(", ")}") - throw ex + try compileSources(files.map(runContext.getSource(_))) + catch case NonFatal(ex) if !this.enrichedErrorMessage => + val files1 = if units.isEmpty then files else units.map(_.source.file) + report.echo(this.enrichErrorMessage(s"exception occurred while compiling ${files1.map(_.path)}")) + throw ex /** TODO: There's a fundamental design problem here: We assemble phases using `fusePhases` * when we first build the compiler. 
But we modify them with -Yskip, -Ystop @@ -221,9 +230,13 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) ctx.base.usePhases(phases) + if ctx.settings.YnoDoubleBindings.value then + ctx.base.checkNoDoubleBindings = true + def runPhases(using Context) = { var lastPrintedTree: PrintedTree = NoPrintedTree val profiler = ctx.profiler + var phasesWereAdjusted = false for (phase <- ctx.base.allPhases) if (phase.isRunnable) @@ -242,6 +255,11 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) ctx.typerState.gc() } + if !phasesWereAdjusted then + phasesWereAdjusted = true + if !Feature.ccEnabledSomewhere then + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) profiler.finished() } @@ -379,3 +397,16 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint given runContext[Dummy_so_its_a_def]: Context = myCtx.nn assert(runContext.runId <= Periods.MaxPossibleRunId) } + +object Run { + extension (run: Run | Null) + def enrichedErrorMessage: Boolean = if run == null then false else run.myEnrichedErrorMessage + def enrichErrorMessage(errorMessage: String)(using Context): String = + if run == null then + report.enrichErrorMessage(errorMessage) + else if !run.enrichedErrorMessage then + run.myEnrichedErrorMessage = true + report.enrichErrorMessage(errorMessage) + else + errorMessage +} diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 1e1db19bcf25..c1dd78451bae 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -6,6 +6,7 @@ import core._ import util.Spans._, Types._, Contexts._, Constants._, 
Names._, NameOps._, Flags._ import Symbols._, StdNames._, Trees._, ContextOps._ import Decorators._, transform.SymUtils._ +import Annotations.Annotation import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, Chars} @@ -117,7 +118,7 @@ object desugar { if (local.exists) (defctx.owner.thisType select local).dealiasKeepAnnots else { def msg = - s"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" + em"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" ErrorType(msg).assertingErrorsReported(msg) } case _ => @@ -165,32 +166,41 @@ object desugar { * * Generate setter where needed */ - def valDef(vdef0: ValDef)(using Context): Tree = { + def valDef(vdef0: ValDef)(using Context): Tree = val vdef @ ValDef(_, tpt, rhs) = vdef0 - val mods = vdef.mods - val valName = normalizeName(vdef, tpt).asTermName - val vdef1 = cpy.ValDef(vdef)(name = valName) + var mods1 = vdef.mods + + def dropInto(tpt: Tree): Tree = tpt match + case Into(tpt1) => + mods1 = vdef.mods.withAddedAnnotation( + TypedSplice( + Annotation(defn.AllowConversionsAnnot, tpt.span.startPos).tree)) + tpt1 + case ByNameTypeTree(tpt1) => + cpy.ByNameTypeTree(tpt)(dropInto(tpt1)) + case PostfixOp(tpt1, op) if op.name == tpnme.raw.STAR => + cpy.PostfixOp(tpt)(dropInto(tpt1), op) + case _ => + tpt + + val vdef1 = cpy.ValDef(vdef)(name = valName, tpt = dropInto(tpt)) + .withMods(mods1) - if (isSetterNeeded(vdef)) { - // TODO: copy of vdef as getter needed? - // val getter = ValDef(mods, name, tpt, rhs) withPos vdef.pos? - // right now vdef maps via expandedTree to a thicket which concerns itself. - // I don't see a problem with that but if there is one we can avoid it by making a copy here. 
+ if isSetterNeeded(vdef) then val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral val setter = cpy.DefDef(vdef)( - name = valName.setterName, - paramss = (setterParam :: Nil) :: Nil, - tpt = TypeTree(defn.UnitType), - rhs = setterRhs - ).withMods((mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) - .dropEndMarker() // the end marker should only appear on the getter definition + name = valName.setterName, + paramss = (setterParam :: Nil) :: Nil, + tpt = TypeTree(defn.UnitType), + rhs = setterRhs + ).withMods((vdef.mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) + .dropEndMarker() // the end marker should only appear on the getter definition Thicket(vdef1, setter) - } else vdef1 - } + end valDef def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = for (tpt <- tpts) yield { @@ -905,16 +915,16 @@ object desugar { name = normalizeName(mdef, mdef.tpt).asTermName, paramss = if mdef.name.isRightAssocOperatorName then - val (typaramss, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters + val (rightTyParams, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters paramss match - case params :: paramss1 => // `params` must have a single parameter and without `given` flag + case rightParam :: paramss1 => // `rightParam` must have a single parameter and without `given` flag def badRightAssoc(problem: String) = - report.error(i"right-associative extension method $problem", mdef.srcPos) + report.error(em"right-associative extension method $problem", mdef.srcPos) extParamss ++ mdef.paramss - params match + rightParam match case ValDefs(vparam :: Nil) => if !vparam.mods.is(Given) then // we merge the extension parameters 
with the method parameters, @@ -924,8 +934,10 @@ object desugar { // def %:[E](f: F)(g: G)(using H): Res = ??? // will be encoded as // def %:[A](using B)[E](f: F)(c: C)(using D)(g: G)(using H): Res = ??? - val (leadingUsing, otherExtParamss) = extParamss.span(isUsingOrTypeParamClause) - leadingUsing ::: typaramss ::: params :: otherExtParamss ::: paramss1 + // + // If you change the names of the clauses below, also change them in right-associative-extension-methods.md + val (leftTyParamsAndLeadingUsing, leftParamAndTrailingUsing) = extParamss.span(isUsingOrTypeParamClause) + leftTyParamsAndLeadingUsing ::: rightTyParams ::: rightParam :: leftParamAndTrailingUsing ::: paramss1 else badRightAssoc("cannot start with using clause") case _ => @@ -1137,7 +1149,7 @@ object desugar { def errorOnGivenBinding(bind: Bind)(using Context): Boolean = report.error( em"""${hl("given")} patterns are not allowed in a ${hl("val")} definition, - |please bind to an identifier and use an alias given.""".stripMargin, bind) + |please bind to an identifier and use an alias given.""", bind) false def isTuplePattern(arity: Int): Boolean = pat match { @@ -1237,7 +1249,7 @@ object desugar { def checkOpaqueAlias(tree: MemberDef)(using Context): MemberDef = def check(rhs: Tree): MemberDef = rhs match case bounds: TypeBoundsTree if bounds.alias.isEmpty => - report.error(i"opaque type must have a right-hand side", tree.srcPos) + report.error(em"opaque type must have a right-hand side", tree.srcPos) tree.withMods(tree.mods.withoutFlags(Opaque)) case LambdaTypeTree(_, body) => check(body) case _ => tree @@ -1454,7 +1466,10 @@ object desugar { val param = makeSyntheticParameter( tpt = if params.exists(_.tpt.isEmpty) then TypeTree() - else Tuple(params.map(_.tpt))) + else Tuple(params.map(_.tpt)), + flags = + if params.nonEmpty && params.head.mods.is(Given) then SyntheticTermParam | Given + else SyntheticTermParam) def selector(n: Int) = if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), 
Literal(Constant(n))) else Select(refOfDef(param), nme.selectorName(n)) @@ -1483,10 +1498,10 @@ object desugar { case vd: ValDef => vd } - def makeContextualFunction(formals: List[Tree], body: Tree, isErased: Boolean)(using Context): Function = { - val mods = if (isErased) Given | Erased else Given + def makeContextualFunction(formals: List[Tree], body: Tree, erasedParams: List[Boolean])(using Context): Function = { + val mods = Given val params = makeImplicitParameters(formals, mods) - FunctionWithMods(params, body, Modifiers(mods)) + FunctionWithMods(params, body, Modifiers(mods), erasedParams) } private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { @@ -1717,7 +1732,7 @@ object desugar { val applyVParams = vargs.zipWithIndex.map { case (p: ValDef, _) => p.withAddedFlags(mods.flags) - case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags) + case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags.toTermFlags) } RefinedTypeTree(polyFunctionTpt, List( DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) @@ -1819,6 +1834,7 @@ object desugar { cpy.ByNameTypeTree(parent)(annotate(tpnme.retainsByName, restpt)) case _ => annotate(tpnme.retains, parent) + case f: FunctionWithMods if f.hasErasedParams => makeFunctionWithValDefs(f, pt) } desugared.withSpan(tree.span) } @@ -1894,6 +1910,28 @@ object desugar { TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) } + /** Ensure the given function tree use only ValDefs for parameters. 
+ * For example, + * FunctionWithMods(List(TypeTree(A), TypeTree(B)), body, mods, erasedParams) + * gets converted to + * FunctionWithMods(List(ValDef(x$1, A), ValDef(x$2, B)), body, mods, erasedParams) + */ + def makeFunctionWithValDefs(tree: Function, pt: Type)(using Context): Function = { + val Function(args, result) = tree + args match { + case (_ : ValDef) :: _ => tree // ValDef case can be easily handled + case _ if !ctx.mode.is(Mode.Type) => tree + case _ => + val applyVParams = args.zipWithIndex.map { + case (p, n) => makeSyntheticParameter(n + 1, p) + } + tree match + case tree: FunctionWithMods => + untpd.FunctionWithMods(applyVParams, tree.body, tree.mods, tree.erasedParams) + case _ => untpd.Function(applyVParams, result) + } + } + /** Returns list of all pattern variables, possibly with their types, * without duplicates */ diff --git a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala index 096a885dcf32..a1c3c0ed0775 100644 --- a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala +++ b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala @@ -75,8 +75,8 @@ object DesugarEnums { def problem = if (!tparam.isOneOf(VarianceFlags)) "is invariant" else "has bounds that depend on a type parameter in the same parameter list" - errorType(i"""cannot determine type argument for enum parent $enumClass, - |type parameter $tparam $problem""", ctx.source.atSpan(span)) + errorType(em"""cannot determine type argument for enum parent $enumClass, + |type parameter $tparam $problem""", ctx.source.atSpan(span)) } } TypeTree(enumClass.typeRef.appliedTo(targs)).withSpan(span) @@ -216,7 +216,7 @@ object DesugarEnums { case Ident(name) => val matches = tparamNames.contains(name) if (matches && (caseTypeParams.nonEmpty || vparamss.isEmpty)) - report.error(i"illegal reference to type parameter $name from enum case", tree.srcPos) + report.error(em"illegal reference to type parameter $name from enum case", tree.srcPos) 
matches case LambdaTypeTree(lambdaParams, body) => underBinders(lambdaParams, foldOver(x, tree)) diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala index 040582476e96..c0cf2c0d1b81 100644 --- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala +++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala @@ -56,7 +56,7 @@ object MainProxies { def addArgs(call: untpd.Tree, mt: MethodType, idx: Int): untpd.Tree = if (mt.isImplicitMethod) { - report.error(s"@main method cannot have implicit parameters", pos) + report.error(em"@main method cannot have implicit parameters", pos) call } else { @@ -74,7 +74,7 @@ object MainProxies { mt.resType match { case restpe: MethodType => if (mt.paramInfos.lastOption.getOrElse(NoType).isRepeatedParam) - report.error(s"varargs parameter of @main method must come last", pos) + report.error(em"varargs parameter of @main method must come last", pos) addArgs(call1, restpe, idx + args.length) case _ => call1 @@ -83,7 +83,7 @@ object MainProxies { var result: List[TypeDef] = Nil if (!mainFun.owner.isStaticOwner) - report.error(s"@main method is not statically accessible", pos) + report.error(em"@main method is not statically accessible", pos) else { var call = ref(mainFun.termRef) mainFun.info match { @@ -91,9 +91,9 @@ object MainProxies { case mt: MethodType => call = addArgs(call, mt, 0) case _: PolyType => - report.error(s"@main method cannot have type parameters", pos) + report.error(em"@main method cannot have type parameters", pos) case _ => - report.error(s"@main can only annotate a method", pos) + report.error(em"@main can only annotate a method", pos) } val errVar = Ident(nme.error) val handler = CaseDef( @@ -203,7 +203,7 @@ object MainProxies { )) (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil case mainAnnot :: others => - report.error(s"method cannot have multiple main annotations", mainAnnot.tree) + 
report.error(em"method cannot have multiple main annotations", mainAnnot.tree) Nil } case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => @@ -379,26 +379,26 @@ object MainProxies { end generateMainClass if (!mainFun.owner.isStaticOwner) - report.error(s"main method is not statically accessible", pos) + report.error(em"main method is not statically accessible", pos) None else mainFun.info match { case _: ExprType => Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) case mt: MethodType => if (mt.isImplicitMethod) - report.error(s"main method cannot have implicit parameters", pos) + report.error(em"main method cannot have implicit parameters", pos) None else mt.resType match case restpe: MethodType => - report.error(s"main method cannot be curried", pos) + report.error(em"main method cannot be curried", pos) None case _ => Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) case _: PolyType => - report.error(s"main method cannot have type parameters", pos) + report.error(em"main method cannot have type parameters", pos) None case _ => - report.error(s"main can only annotate a method", pos) + report.error(em"main can only annotate a method", pos) None } } diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index bcedc4dfa50b..ace396d1e583 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -4,7 +4,7 @@ package ast import core.Contexts._ import core.Decorators._ import util.Spans._ -import Trees.{MemberDef, DefTree, WithLazyField} +import Trees.{MemberDef, DefTree, WithLazyFields} import dotty.tools.dotc.core.Types.AnnotatedType import dotty.tools.dotc.core.Types.ImportType import dotty.tools.dotc.core.Types.Type @@ -106,12 +106,15 @@ object NavigateAST { // FIXME: We shouldn't be manually forcing trees here, we should replace // our 
usage of `productIterator` by something in `Positioned` that takes // care of low-level details like this for us. - p match { - case p: WithLazyField[?] => - p.forceIfLazy + p match + case p: WithLazyFields => p.forceFields() case _ => - } - childPath(p.productIterator, p :: path) + val iterator = p match + case defdef: DefTree[?] => + p.productIterator ++ defdef.mods.productIterator + case _ => + p.productIterator + childPath(iterator, p :: path) } else { p match { diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala index d14addb8c9c7..dd783be7a9e1 100644 --- a/compiler/src/dotty/tools/dotc/ast/Positioned.scala +++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala @@ -154,14 +154,17 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src } } + private class LastPosRef: + var positioned: Positioned | Null = null + var span = NoSpan + /** Check that all positioned items in this tree satisfy the following conditions: * - Parent spans contain child spans * - If item is a non-empty tree, it has a position */ def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { import untpd._ - var lastPositioned: Positioned | Null = null - var lastSpan = NoSpan + val last = LastPosRef() def check(p: Any): Unit = p match { case p: Positioned => assert(span contains p.span, @@ -181,19 +184,19 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src case _: XMLBlock => // FIXME: Trees generated by the XML parser do not satisfy `checkPos` case _: WildcardFunction - if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => + if last.positioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => // ignore transition from last wildcard parameter to body case _ => - assert(!lastSpan.exists || !p.span.exists || lastSpan.end <= p.span.start, + assert(!last.span.exists || !p.span.exists || last.span.end <= p.span.start, i"""position error, child 
positions overlap or in wrong order |parent = $this - |1st child = $lastPositioned - |1st child span = $lastSpan + |1st child = ${last.positioned} + |1st child span = ${last.span} |2nd child = $p |2nd child span = ${p.span}""".stripMargin) } - lastPositioned = p - lastSpan = p.span + last.positioned = p + last.span = p.span p.checkPos(nonOverlapping) case m: untpd.Modifiers => m.annotations.foreach(check) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 083a92b26d11..c2147b6af2d3 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -14,10 +14,7 @@ import scala.collection.mutable import scala.annotation.tailrec -trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => - - // Note: the <: Type constraint looks necessary (and is needed to make the file compile in dotc). - // But Scalac accepts the program happily without it. Need to find out why. 
+trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree @@ -105,6 +102,12 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case _ => tree } + def stripTyped(tree: Tree): Tree = unsplice(tree) match + case Typed(expr, _) => + stripTyped(expr) + case _ => + tree + /** The number of arguments in an application */ def numArgs(tree: Tree): Int = unsplice(tree) match { case Apply(fn, args) => numArgs(fn) + args.length @@ -113,6 +116,24 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case _ => 0 } + /** The type arguments of a possibly curried call */ + def typeArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case TypeApply(fn, args) => loop(fn, args :: argss) + case Apply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + + /** The term arguments of a possibly curried call */ + def termArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case Apply(fn, args) => loop(fn, args :: argss) + case TypeApply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + /** All term arguments of an application in a single flattened list */ def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { case Apply(fn, args) => allArguments(fn) ::: args @@ -195,11 +216,11 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case arg => arg.typeOpt.widen.isRepeatedParam } - /** Is tree a type tree of the form `=> T` or (under -Ycc) `{refs}-> T`? */ + /** Is tree a type tree of the form `=> T` or (under pureFunctions) `{refs}-> T`? 
*/ def isByNameType(tree: Tree)(using Context): Boolean = stripByNameType(tree) ne tree - /** Strip `=> T` to `T` and (under -Ycc) `{refs}-> T` to `T` */ + /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match case ByNameTypeTree(t1) => t1 case untpd.CapturingTypeTree(_, parent) => @@ -298,7 +319,7 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => */ def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match { case Nil => NoInitsInterface - case Apply(_, _ :: _) :: _ => EmptyFlags + case Apply(_, _ :: _) :: _ | Block(_, _) :: _ => EmptyFlags case _ :: parents1 => parentsKind(parents1) } @@ -311,6 +332,50 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case Block(_, expr) => forallResults(expr, p) case _ => p(tree) } + + def appliedCore(tree: Tree): Tree = tree match { + case Apply(fn, _) => appliedCore(fn) + case TypeApply(fn, _) => appliedCore(fn) + case AppliedTypeTree(fn, _) => appliedCore(fn) + case tree => tree + } + + /** Is tree an application with result `this.type`? + * Accept `b.addOne(x)` and also `xs(i) += x` + * where the op is an assignment operator. 
+ */ + def isThisTypeResult(tree: Tree)(using Context): Boolean = appliedCore(tree) match { + case fun @ Select(receiver, op) => + val argss = termArgss(tree) + tree.tpe match { + case ThisType(tref) => + tref.symbol == receiver.symbol + case tref: TermRef => + tref.symbol == receiver.symbol || argss.exists(_.exists(tref.symbol == _.symbol)) + case _ => + def checkSingle(sym: Symbol): Boolean = + (sym == receiver.symbol) || { + receiver match { + case Apply(_, _) => op.isOpAssignmentName // xs(i) += x + case _ => receiver.symbol != NoSymbol && + (receiver.symbol.isGetter || receiver.symbol.isField) // xs.addOne(x) for var xs + } + } + @tailrec def loop(mt: Type): Boolean = mt match { + case m: MethodType => + m.resType match { + case ThisType(tref) => checkSingle(tref.symbol) + case tref: TermRef => checkSingle(tref.symbol) + case restpe => loop(restpe) + } + case PolyType(_, restpe) => loop(restpe) + case _ => false + } + fun.symbol != NoSymbol && loop(fun.symbol.info) + } + case _ => + tree.tpe.isInstanceOf[ThisType] + } } trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => @@ -334,6 +399,8 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] Some(tree) case Block(Nil, expr) => functionWithUnknownParamType(expr) + case NamedArg(_, expr) => + functionWithUnknownParamType(expr) case _ => None } @@ -400,18 +467,18 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] } } - /** Under -Ycc: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. + /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` * are ignored by the extractor. 
*/ object ImpureByNameTypeTree: - + def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = untpd.CapturingTypeTree( - Ident(nme.CAPTURE_ROOT).withSpan(tp.span.startPos) :: Nil, tp) + untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match - case untpd.CapturingTypeTree(id @ Ident(nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) + case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) if id.span == bntp.span.startPos => Some(bntp) case _ => None end ImpureByNameTypeTree @@ -512,7 +579,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => sym.owner.isPrimitiveValueClass || sym.owner == defn.StringClass || defn.pureMethods.contains(sym) - tree.tpe.isInstanceOf[ConstantType] && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. + tree.tpe.isInstanceOf[ConstantType] && tree.symbol != NoSymbol && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. || fn.symbol.isStableMember && !fn.symbol.is(Lazy) // constructors of no-inits classes are stable /** The purity level of this reference. 
@@ -686,24 +753,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => } } - /** The type arguments of a possibly curried call */ - def typeArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case TypeApply(fn, args) => loop(fn, args :: argss) - case Apply(fn, args) => loop(fn, argss) - case _ => argss - loop(tree, Nil) - - /** The term arguments of a possibly curried call */ - def termArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case Apply(fn, args) => loop(fn, args :: argss) - case TypeApply(fn, args) => loop(fn, argss) - case _ => argss - loop(tree, Nil) - /** The type and term arguments of a possibly curried call, in the order they are given */ def allArgss(tree: Tree): List[List[Tree]] = @tailrec @@ -746,8 +795,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => Some(meth) case Block(Nil, expr) => unapply(expr) - case Inlined(_, bindings, expr) if bindings.forall(isPureBinding) => - unapply(expr) case _ => None } @@ -791,10 +838,12 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => /** The symbols defined locally in a statement list */ def localSyms(stats: List[Tree])(using Context): List[Symbol] = - val locals = new mutable.ListBuffer[Symbol] - for stat <- stats do - if stat.isDef && stat.symbol.exists then locals += stat.symbol - locals.toList + if stats.isEmpty then Nil + else + val locals = new mutable.ListBuffer[Symbol] + for stat <- stats do + if stat.isDef && stat.symbol.exists then locals += stat.symbol + locals.toList /** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */ def definedSym(tree: Tree)(using Context): Symbol = @@ -913,7 +962,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => && tree.isTerm && { val qualType = tree.qualifier.tpe - 
hasRefinement(qualType) && !qualType.derivesFrom(defn.PolyFunctionClass) + hasRefinement(qualType) && !defn.isRefinedFunctionType(qualType) } def loop(tree: Tree): Boolean = tree match case TypeApply(fun, _) => @@ -1040,7 +1089,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case Inlined(_, Nil, expr) => unapply(expr) case Block(Nil, expr) => unapply(expr) case _ => - tree.tpe.widenTermRefExpr.normalized match + tree.tpe.widenTermRefExpr.dealias.normalized match case ConstantType(Constant(x)) => Some(x) case _ => None } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala index caf8d68442f6..e52bf1064e4c 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala @@ -55,10 +55,10 @@ class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { transform(tree.tpt), transform(tree.rhs)(using nestedScopeCtx(tree.paramss.flatten))) } - case impl @ Template(constr, parents, self, _) => + case impl @ Template(constr, _, self, _) => cpy.Template(tree)( transformSub(constr), - transform(parents)(using ctx.superCallContext), + transform(impl.parents)(using ctx.superCallContext), Nil, transformSelf(self), transformStats(impl.body, tree.symbol)) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 71998aff9304..faeafae97f5e 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -43,7 +43,7 @@ class TreeTypeMap( def copy( typeMap: Type => Type, - treeMap: tpd.Tree => tpd.Tree, + treeMap: Tree => Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], @@ -85,26 +85,42 @@ class TreeTypeMap( updateDecls(prevStats.tail, newStats.tail) } - def transformInlined(tree: tpd.Inlined)(using Context): tpd.Tree = + def 
transformInlined(tree: Inlined)(using Context): Tree = val Inlined(call, bindings, expanded) = tree val (tmap1, bindings1) = transformDefs(bindings) val expanded1 = tmap1.transform(expanded) cpy.Inlined(tree)(call, bindings1, expanded1) - override def transform(tree: tpd.Tree)(using Context): tpd.Tree = treeMap(tree) match { - case impl @ Template(constr, parents, self, _) => + override def transform(tree: Tree)(using Context): Tree = treeMap(tree) match { + case impl @ Template(constr, _, self, _) => val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) cpy.Template(impl)( constr = tmap.transformSub(constr), - parents = parents.mapconserve(transform), + parents = impl.parents.mapconserve(transform), self = tmap.transformSub(self), body = impl.body mapconserve (tmap.transform(_)(using ctx.withOwner(mapOwner(impl.symbol.owner)))) ).withType(tmap.mapType(impl.tpe)) case tree1 => tree1.withType(mapType(tree1.tpe)) match { - case id: Ident if tpd.needsSelect(id.tpe) => - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + case id: Ident => + if needsSelect(id.tpe) then + ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + else + super.transform(id) + case sel: Select => + if needsIdent(sel.tpe) then + ref(sel.tpe.asInstanceOf[TermRef]).withSpan(sel.span) + else + super.transform(sel) + case app: Apply => + super.transform(app) + case blk @ Block(stats, expr) => + val (tmap1, stats1) = transformDefs(stats) + val expr1 = tmap1.transform(expr) + cpy.Block(blk)(stats1, expr1) + case lit @ Literal(Constant(tpe: Type)) => + cpy.Literal(lit)(Constant(mapType(tpe))) case ddef @ DefDef(name, paramss, tpt, _) => val (tmap1, paramss1) = transformAllParamss(paramss) val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) @@ -117,10 +133,6 @@ class TreeTypeMap( case tdef @ LambdaTypeTree(tparams, body) => val (tmap1, tparams1) = transformDefs(tparams) cpy.LambdaTypeTree(tdef)(tparams1, tmap1.transform(body)) - case blk @ Block(stats, 
expr) => - val (tmap1, stats1) = transformDefs(stats) - val expr1 = tmap1.transform(expr) - cpy.Block(blk)(stats1, expr1) case inlined: Inlined => transformInlined(inlined) case cdef @ CaseDef(pat, guard, rhs) => @@ -139,18 +151,16 @@ class TreeTypeMap( val content1 = transform(content) val tpt1 = transform(tpt) cpy.Hole(tree)(args = args1, content = content1, tpt = tpt1) - case lit @ Literal(Constant(tpe: Type)) => - cpy.Literal(lit)(Constant(mapType(tpe))) case tree1 => super.transform(tree1) } } - override def transformStats(trees: List[tpd.Tree], exprOwner: Symbol)(using Context): List[Tree] = + override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = transformDefs(trees)._2 - def transformDefs[TT <: tpd.Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { - val tmap = withMappedSyms(tpd.localSyms(trees)) + def transformDefs[TT <: Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { + val tmap = withMappedSyms(localSyms(trees)) (tmap, tmap.transformSub(trees)) } @@ -165,7 +175,7 @@ class TreeTypeMap( case nil => (this, paramss) - def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree] + def apply[ThisTree <: Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree] def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree)) diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 1159d13d5aef..c0b5987c3875 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -15,11 +15,12 @@ import config.Printers.overload import annotation.internal.sharable import annotation.unchecked.uncheckedVariance import annotation.constructorOnly +import compiletime.uninitialized import Decorators._ object Trees { - type Untyped = Nothing + type Untyped = Type | Null /** The total number of created tree nodes, maintained if 
Stats.enabled */ @sharable var ntrees: Int = 0 @@ -45,36 +46,34 @@ object Trees { * - Type checking an untyped tree should remove all embedded `TypedSplice` * nodes. */ - abstract class Tree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { if (Stats.enabled) ntrees += 1 /** The type constructor at the root of the tree */ - type ThisTree[T >: Untyped] <: Tree[T] + type ThisTree[T <: Untyped] <: Tree[T] - protected var myTpe: T @uncheckedVariance = _ + protected var myTpe: T @uncheckedVariance = uninitialized /** Destructively set the type of the tree. This should be called only when it is known that * it is safe under sharing to do so. One use-case is in the withType method below * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, * where we overwrite with a simplified version of the type itself. */ - private[dotc] def overwriteType(tpe: T): Unit = + private[dotc] def overwriteType(tpe: T @uncheckedVariance): Unit = myTpe = tpe /** The type of the tree. In case of an untyped tree, * an UnAssignedTypeException is thrown. (Overridden by empty trees) */ - final def tpe: T @uncheckedVariance = { - if (myTpe == null) - throw UnAssignedTypeException(this) - myTpe - } + final def tpe: T = + if myTpe == null then throw UnAssignedTypeException(this) + myTpe.uncheckedNN /** Copy `tpe` attribute from tree `from` into this tree, independently * whether it is null or not. 
- final def copyAttr[U >: Untyped](from: Tree[U]): ThisTree[T] = { + final def copyAttr[U <: Untyped](from: Tree[U]): ThisTree[T] = { val t1 = this.withSpan(from.span) val t2 = if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) @@ -131,10 +130,9 @@ object Trees { */ final def hasType: Boolean = myTpe != null - final def typeOpt: Type = myTpe match { + final def typeOpt: Type = myTpe match case tp: Type => tp - case _ => NoType - } + case null => NoType /** The denotation referred to by this tree. * Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other @@ -166,7 +164,7 @@ object Trees { def toList: List[Tree[T]] = this :: Nil /** if this tree is the empty tree, the alternative, else this tree */ - inline def orElse[U >: Untyped <: T](inline that: Tree[U]): Tree[U] = + inline def orElse[U >: T <: Untyped](inline that: Tree[U]): Tree[U] = if (this eq genericEmptyTree) that else this /** The number of nodes in this tree */ @@ -217,42 +215,42 @@ object Trees { override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] } - class UnAssignedTypeException[T >: Untyped](tree: Tree[T]) extends RuntimeException { + class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException { override def getMessage: String = s"type of $tree is not assigned" } - type LazyTree[-T >: Untyped] = Tree[T] | Lazy[Tree[T]] - type LazyTreeList[-T >: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] + type LazyTree[+T <: Untyped] = Tree[T] | Lazy[Tree[T]] + type LazyTreeList[+T <: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] // ------ Categories of trees ----------------------------------- /** Instances of this class are trees for which isType is definitely true. * Note that some trees have isType = true without being TypTrees (e.g. 
Ident, Annotated) */ - trait TypTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: TypTree[T] + trait TypTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TypTree[T] override def isType: Boolean = true } /** Instances of this class are trees for which isTerm is definitely true. * Note that some trees have isTerm = true without being TermTrees (e.g. Ident, Annotated) */ - trait TermTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: TermTree[T] + trait TermTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TermTree[T] override def isTerm: Boolean = true } /** Instances of this class are trees which are not terms but are legal * parts of patterns. */ - trait PatternTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: PatternTree[T] + trait PatternTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: PatternTree[T] override def isPattern: Boolean = true } /** Tree's denotation can be derived from its type */ - abstract class DenotingTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: DenotingTree[T] + abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: DenotingTree[T] override def denot(using Context): Denotation = typeOpt.stripped match case tpe: NamedType => tpe.denot case tpe: ThisType => tpe.cls.denot @@ -262,8 +260,8 @@ object Trees { /** Tree's denot/isType/isTerm properties come from a subtree * identified by `forwardTo`. 
*/ - abstract class ProxyTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: ProxyTree[T] + abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: ProxyTree[T] def forwardTo: Tree[T] override def denot(using Context): Denotation = forwardTo.denot override def isTerm: Boolean = forwardTo.isTerm @@ -271,24 +269,24 @@ object Trees { } /** Tree has a name */ - abstract class NameTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: NameTree[T] + abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: NameTree[T] def name: Name } /** Tree refers by name to a denotation */ - abstract class RefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { - type ThisTree[-T >: Untyped] <: RefTree[T] + abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { + type ThisTree[+T <: Untyped] <: RefTree[T] def qualifier: Tree[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName } /** Tree defines a new symbol */ - trait DefTree[-T >: Untyped] extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: DefTree[T] + trait DefTree[+T <: Untyped] extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: DefTree[T] - private var myMods: untpd.Modifiers | Null = _ + private var myMods: untpd.Modifiers | Null = uninitialized private[dotc] def rawMods: untpd.Modifiers = if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN @@ -313,7 +311,7 @@ object Trees { extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods - sealed trait WithEndMarker[-T >: Untyped]: + sealed trait WithEndMarker[+T <: Untyped]: self: PackageDef[T] | NamedDefTree[T] => import 
WithEndMarker.* @@ -356,9 +354,9 @@ object Trees { end WithEndMarker - abstract class NamedDefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class NamedDefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] with DefTree[T] with WithEndMarker[T] { - type ThisTree[-T >: Untyped] <: NamedDefTree[T] + type ThisTree[+T <: Untyped] <: NamedDefTree[T] protected def srcName(using Context): Name = if name == nme.CONSTRUCTOR then nme.this_ @@ -395,8 +393,8 @@ object Trees { * The envelope of a MemberDef contains the whole definition and has its point * on the opening keyword (or the next token after that if keyword is missing). */ - abstract class MemberDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { - type ThisTree[-T >: Untyped] <: MemberDef[T] + abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { + type ThisTree[+T <: Untyped] <: MemberDef[T] def rawComment: Option[Comment] = getAttachment(DocComment) @@ -409,40 +407,40 @@ object Trees { } /** A ValDef or DefDef tree */ - abstract class ValOrDefDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { - type ThisTree[-T >: Untyped] <: ValOrDefDef[T] + abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T], WithLazyFields { + type ThisTree[+T <: Untyped] <: ValOrDefDef[T] def name: TermName def tpt: Tree[T] - def unforcedRhs: LazyTree[T] = unforced - def rhs(using Context): Tree[T] = forceIfLazy + def unforcedRhs: LazyTree[T] + def rhs(using Context): Tree[T] } - trait ValOrTypeDef[-T >: Untyped] extends MemberDef[T]: - type ThisTree[-T >: Untyped] <: ValOrTypeDef[T] + trait ValOrTypeDef[+T <: Untyped] extends MemberDef[T]: + type ThisTree[+T <: Untyped] <: ValOrTypeDef[T] - type ParamClause[T >: Untyped] = List[ValDef[T]] | List[TypeDef[T]] + type ParamClause[T <: 
Untyped] = List[ValDef[T]] | List[TypeDef[T]] // ----------- Tree case classes ------------------------------------ /** name */ - case class Ident[-T >: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) + case class Ident[+T <: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) extends RefTree[T] { - type ThisTree[-T >: Untyped] = Ident[T] + type ThisTree[+T <: Untyped] = Ident[T] def qualifier: Tree[T] = genericEmptyTree def isBackquoted: Boolean = hasAttachment(Backquoted) } - class SearchFailureIdent[-T >: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) + class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) extends Ident[T](name) { def explanation = expl override def toString: String = s"SearchFailureIdent($explanation)" } /** qualifier.name, or qualifier#name, if qualifier is a type */ - case class Select[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) + case class Select[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) extends RefTree[T] { - type ThisTree[-T >: Untyped] = Select[T] + type ThisTree[+T <: Untyped] = Select[T] override def denot(using Context): Denotation = typeOpt match case ConstantType(_) if ConstFold.foldedUnops.contains(name) => @@ -464,15 +462,15 @@ object Trees { else span } - class SelectWithSig[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) + class SelectWithSig[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) extends Select[T](qualifier, name) { override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" } /** qual.this */ - case class This[-T >: Untyped] private[ast] (qual: 
untpd.Ident)(implicit @constructorOnly src: SourceFile) + case class This[+T <: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = This[T] + type ThisTree[+T <: Untyped] = This[T] // Denotation of a This tree is always the underlying class; needs correction for modules. override def denot(using Context): Denotation = typeOpt match { @@ -484,21 +482,21 @@ object Trees { } /** C.super[mix], where qual = C.this */ - case class Super[-T >: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) + case class Super[+T <: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Super[T] + type ThisTree[+T <: Untyped] = Super[T] def forwardTo: Tree[T] = qual } - abstract class GenericApply[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] <: GenericApply[T] + abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] <: GenericApply[T] val fun: Tree[T] val args: List[Tree[T]] def forwardTo: Tree[T] = fun } object GenericApply: - def unapply[T >: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match + def unapply[T <: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match case tree: GenericApply[T] => Some((tree.fun, tree.args)) case _ => None @@ -509,9 +507,9 @@ object Trees { case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply /** fun(args) */ - case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Apply[+T <: Untyped] private[ast] (fun: Tree[T], args: 
List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends GenericApply[T] { - type ThisTree[-T >: Untyped] = Apply[T] + type ThisTree[+T <: Untyped] = Apply[T] def setApplyKind(kind: ApplyKind) = putAttachment(untpd.KindOfApply, kind) @@ -525,57 +523,57 @@ object Trees { } /** fun[args] */ - case class TypeApply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class TypeApply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends GenericApply[T] { - type ThisTree[-T >: Untyped] = TypeApply[T] + type ThisTree[+T <: Untyped] = TypeApply[T] } /** const */ - case class Literal[-T >: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) + case class Literal[+T <: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) extends Tree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Literal[T] + type ThisTree[+T <: Untyped] = Literal[T] } /** new tpt, but no constructor call */ - case class New[-T >: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class New[+T <: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = New[T] + type ThisTree[+T <: Untyped] = New[T] } /** expr : tpt */ - case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Typed[+T <: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Typed[T] + type ThisTree[+T <: Untyped] = Typed[T] def forwardTo: Tree[T] = expr } /** name = arg, in a parameter list */ - case class NamedArg[-T >: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) + case class NamedArg[+T 
<: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = NamedArg[T] + type ThisTree[+T <: Untyped] = NamedArg[T] } /** name = arg, outside a parameter list */ - case class Assign[-T >: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Assign[+T <: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Assign[T] + type ThisTree[+T <: Untyped] = Assign[T] } /** { stats; expr } */ - case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Block[+T <: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = Block[T] + type ThisTree[+T <: Untyped] = Block[T] override def isType: Boolean = expr.isType override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary } /** if cond then thenp else elsep */ - case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + case class If[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = If[T] + type ThisTree[+T <: Untyped] = If[T] def isInline = false } - class InlineIf[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + class InlineIf[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) extends If(cond, thenp, elsep) { override def isInline = true override def toString = s"InlineIf($cond, $thenp, $elsep)" @@ -590,33 +588,33 @@ object Trees { * of the 
closure is a function type, otherwise it is the type * given in `tpt`, which must be a SAM type. */ - case class Closure[-T >: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Closure[+T <: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Closure[T] + type ThisTree[+T <: Untyped] = Closure[T] } /** selector match { cases } */ - case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + case class Match[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Match[T] + type ThisTree[+T <: Untyped] = Match[T] def isInline = false } - class InlineMatch[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + class InlineMatch[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends Match(selector, cases) { override def isInline = true override def toString = s"InlineMatch($selector, $cases)" } /** case pat if guard => body */ - case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class CaseDef[+T <: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = CaseDef[T] + type ThisTree[+T <: Untyped] = CaseDef[T] } /** label[tpt]: { expr } */ - case class Labeled[-T >: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Labeled[+T <: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: 
SourceFile) extends NameTree[T] { - type ThisTree[-T >: Untyped] = Labeled[T] + type ThisTree[+T <: Untyped] = Labeled[T] def name: Name = bind.name } @@ -625,33 +623,33 @@ object Trees { * After program transformations this is not necessarily the enclosing method, because * closures can intervene. */ - case class Return[-T >: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) + case class Return[+T <: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Return[T] + type ThisTree[+T <: Untyped] = Return[T] } /** while (cond) { body } */ - case class WhileDo[-T >: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class WhileDo[+T <: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = WhileDo[T] + type ThisTree[+T <: Untyped] = WhileDo[T] } /** try block catch cases finally finalizer */ - case class Try[-T >: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Try[+T <: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Try[T] + type ThisTree[+T <: Untyped] = Try[T] } /** Seq(elems) * @param tpt The element type of the sequence. 
*/ - case class SeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class SeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = SeqLiteral[T] + type ThisTree[+T <: Untyped] = SeqLiteral[T] } /** Array(elems) */ - class JavaSeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + class JavaSeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends SeqLiteral(elems, elemtpt) { override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" } @@ -672,17 +670,17 @@ object Trees { * different context: `bindings` represent the arguments to the inlined * call, whereas `expansion` represents the body of the inlined function. */ - case class Inlined[-T >: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = Inlined[T] + type ThisTree[+T <: Untyped] = Inlined[T] override def isTerm = expansion.isTerm override def isType = expansion.isType } /** A type tree that represents an existing or inferred type */ - case class TypeTree[-T >: Untyped]()(implicit @constructorOnly src: SourceFile) + case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = TypeTree[T] + type ThisTree[+T <: Untyped] = TypeTree[T] override def isEmpty: Boolean = !hasType override def toString: String = s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" @@ -693,25 +691,25 @@ object Trees { * - as 
a (result-)type of an inferred ValDef or DefDef. * Every TypeVar is created as the type of one InferredTypeTree. */ - class InferredTypeTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] /** ref.type */ - case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) + case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = SingletonTypeTree[T] + type ThisTree[+T <: Untyped] = SingletonTypeTree[T] } /** tpt { refinements } */ - case class RefinedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class RefinedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = RefinedTypeTree[T] + type ThisTree[+T <: Untyped] = RefinedTypeTree[T] def forwardTo: Tree[T] = tpt } /** tpt[args] */ - case class AppliedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class AppliedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = AppliedTypeTree[T] + type ThisTree[+T <: Untyped] = AppliedTypeTree[T] def forwardTo: Tree[T] = tpt } @@ -738,40 +736,40 @@ object Trees { * source code written by the user with the trees used by the compiler (for * example, to make "find all references" work in the IDE). 
*/ - case class LambdaTypeTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class LambdaTypeTree[+T <: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = LambdaTypeTree[T] + type ThisTree[+T <: Untyped] = LambdaTypeTree[T] } - case class TermLambdaTypeTree[-T >: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TermLambdaTypeTree[+T <: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = TermLambdaTypeTree[T] + type ThisTree[+T <: Untyped] = TermLambdaTypeTree[T] } /** [bound] selector match { cases } */ - case class MatchTypeTree[-T >: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + case class MatchTypeTree[+T <: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = MatchTypeTree[T] + type ThisTree[+T <: Untyped] = MatchTypeTree[T] } /** => T */ - case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) + case class ByNameTypeTree[+T <: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = ByNameTypeTree[T] + type ThisTree[+T <: Untyped] = ByNameTypeTree[T] } /** >: lo <: hi * >: lo <: hi = alias for RHS of bounded opaque type */ - case class TypeBoundsTree[-T >: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TypeBoundsTree[+T <: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: 
Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = TypeBoundsTree[T] + type ThisTree[+T <: Untyped] = TypeBoundsTree[T] } /** name @ body */ - case class Bind[-T >: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Bind[+T <: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] with PatternTree[T] { - type ThisTree[-T >: Untyped] = Bind[T] + type ThisTree[+T <: Untyped] = Bind[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName @@ -780,9 +778,9 @@ object Trees { } /** tree_1 | ... | tree_n */ - case class Alternative[-T >: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Alternative[+T <: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends PatternTree[T] { - type ThisTree[-T >: Untyped] = Alternative[T] + type ThisTree[+T <: Untyped] = Alternative[T] } /** The typed translation of `extractor(patterns)` in a pattern. 
The translation has the following @@ -799,29 +797,33 @@ object Trees { * val result = fun(sel)(implicits) * if (result.isDefined) "match patterns against result" */ - case class UnApply[-T >: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class UnApply[+T <: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with PatternTree[T] { - type ThisTree[-T >: Untyped] = UnApply[T] + type ThisTree[+T <: Untyped] = UnApply[T] def forwardTo = fun } /** mods val name: tpt = rhs */ - case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class ValDef[+T <: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) extends ValOrDefDef[T], ValOrTypeDef[T] { - type ThisTree[-T >: Untyped] = ValDef[T] + type ThisTree[+T <: Untyped] = ValDef[T] assert(isEmpty || (tpt ne genericEmptyTree)) - def unforced: LazyTree[T] = preRhs - protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + + def unforcedRhs: LazyTree[T] = preRhs + def forceFields()(using Context): Unit = preRhs = force(preRhs) + def rhs(using Context): Tree[T] = { forceFields(); preRhs.asInstanceOf[Tree[T]] } } /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ - case class DefDef[-T >: Untyped] private[ast] (name: TermName, - paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class DefDef[+T <: Untyped] private[ast] (name: TermName, + paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) extends ValOrDefDef[T] { - type ThisTree[-T >: Untyped] = DefDef[T] 
+ type ThisTree[+T <: Untyped] = DefDef[T] assert(tpt ne genericEmptyTree) - def unforced: LazyTree[T] = preRhs - protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + + def unforcedRhs: LazyTree[T] = preRhs + def forceFields()(using Context): Unit = preRhs = force(preRhs) + def rhs(using Context): Tree[T] = { forceFields(); preRhs.asInstanceOf[Tree[T]] } def leadingTypeParams(using Context): List[TypeDef[T]] = paramss match case (tparams @ (tparam: TypeDef[_]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] @@ -842,9 +844,9 @@ object Trees { * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods */ - case class TypeDef[-T >: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TypeDef[+T <: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) extends MemberDef[T], ValOrTypeDef[T] { - type ThisTree[-T >: Untyped] = TypeDef[T] + type ThisTree[+T <: Untyped] = TypeDef[T] /** Is this a definition of a class? */ def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] @@ -857,22 +859,26 @@ object Trees { * if this is of class untpd.DerivingTemplate. * Typed templates only have parents. 
*/ - case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) - extends DefTree[T] with WithLazyField[List[Tree[T]]] { - type ThisTree[-T >: Untyped] = Template[T] - def unforcedBody: LazyTreeList[T] = unforced - def unforced: LazyTreeList[T] = preBody - protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x - def body(using Context): List[Tree[T]] = forceIfLazy + case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], private var preParentsOrDerived: LazyTreeList[T], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) + extends DefTree[T] with WithLazyFields { + type ThisTree[+T <: Untyped] = Template[T] + + def forceFields()(using Context): Unit = + preParentsOrDerived = force(preParentsOrDerived) + preBody = force(preBody) - def parents: List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate - def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate + def unforcedBody: LazyTreeList[T] = preBody + def body(using Context): List[Tree[T]] = { forceFields(); preBody.asInstanceOf[List[Tree[T]]] } + def parentsOrDerived(using Context): List[Tree[T]] = { forceFields(); preParentsOrDerived.asInstanceOf[List[Tree[T]]] } + + def parents(using Context): List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate + def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate } - abstract class ImportOrExport[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class ImportOrExport[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: ImportOrExport[T] + type ThisTree[+T <: Untyped] <: ImportOrExport[T] val expr: Tree[T] val selectors: List[untpd.ImportSelector] } @@ -881,36 +887,36 @@ object Trees { * where a selector 
is either an untyped `Ident`, `name` or * an untyped thicket consisting of `name` and `rename`. */ - case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + case class Import[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) extends ImportOrExport[T] { - type ThisTree[-T >: Untyped] = Import[T] + type ThisTree[+T <: Untyped] = Import[T] } /** export expr.selectors * where a selector is either an untyped `Ident`, `name` or * an untyped thicket consisting of `name` and `rename`. */ - case class Export[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + case class Export[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) extends ImportOrExport[T] { - type ThisTree[-T >: Untyped] = Export[T] + type ThisTree[+T <: Untyped] = Export[T] } /** package pid { stats } */ - case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class PackageDef[+T <: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with WithEndMarker[T] { - type ThisTree[-T >: Untyped] = PackageDef[T] + type ThisTree[+T <: Untyped] = PackageDef[T] def forwardTo: RefTree[T] = pid protected def srcName(using Context): Name = pid.name } /** arg @annot */ - case class Annotated[-T >: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Annotated[+T <: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] { - type ThisTree[-T >: Untyped] = Annotated[T] + type ThisTree[+T <: Untyped] = Annotated[T] def forwardTo: Tree[T] = arg } 
- trait WithoutTypeOrPos[-T >: Untyped] extends Tree[T] { + trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T] { override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] override def span: Span = NoSpan override def span_=(span: Span): Unit = {} @@ -921,17 +927,17 @@ object Trees { * The contained trees will be integrated when transformed with * a `transform(List[Tree])` call. */ - case class Thicket[-T >: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Thicket[+T <: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends Tree[T] with WithoutTypeOrPos[T] { myTpe = NoType.asInstanceOf[T] - type ThisTree[-T >: Untyped] = Thicket[T] + type ThisTree[+T <: Untyped] = Thicket[T] - def mapElems(op: Tree[T] => Tree[T] @uncheckedVariance): Thicket[T] = { + def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = { val newTrees = trees.mapConserve(op) if (trees eq newTrees) this else - Thicket[T](newTrees)(source).asInstanceOf[this.type] + Thicket[U](newTrees)(source).asInstanceOf[this.type] } override def foreachInThicket(op: Tree[T] => Unit): Unit = @@ -950,12 +956,12 @@ object Trees { mapElems(_.withSpan(span)).asInstanceOf[this.type] } - class EmptyTree[T >: Untyped] extends Thicket(Nil)(NoSource) { + class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource) { // assert(uniqueId != 1492) override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") } - class EmptyValDef[T >: Untyped] extends ValDef[T]( + class EmptyValDef[T <: Untyped] extends ValDef[T]( nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { myTpe = NoType.asInstanceOf[T] setMods(untpd.Modifiers(PrivateLocal)) @@ -966,8 +972,8 @@ object Trees { @sharable val theEmptyTree = new EmptyTree[Type]() @sharable val theEmptyValDef = new EmptyValDef[Type]() - def genericEmptyValDef[T >: Untyped]: ValDef[T] = 
theEmptyValDef.asInstanceOf[ValDef[T]] - def genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] + def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] + def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] /** Tree that replaces a level 1 splices in pickled (level 0) quotes. * It is only used when picking quotes (will never be in a TASTy file). @@ -978,13 +984,13 @@ object Trees { * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. * @param tpt Type of the hole */ - case class Hole[-T >: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: Hole[T] + case class Hole[+T <: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: Hole[T] override def isTerm: Boolean = isTermHole override def isType: Boolean = !isTermHole } - def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { + def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = remaining match { case Thicket(elems) :: remaining1 => @@ -1010,34 +1016,31 @@ object Trees { // ----- Lazy trees and tree sequences - /** A tree that can have a lazy field - * The field is represented by some private `var` which is - * accessed by `unforced` and `force`. Forcing the field will - * set the `var` to the underlying value. 
- */ - trait WithLazyField[+T <: AnyRef] { - def unforced: T | Lazy[T] - protected def force(x: T @uncheckedVariance): Unit - def forceIfLazy(using Context): T = unforced match { - case lzy: Lazy[T @unchecked] => - val x = lzy.complete - force(x) - x - case x: T @ unchecked => x - } - } - /** A base trait for lazy tree fields. * These can be instantiated with Lazy instances which * can delay tree construction until the field is first demanded. */ - trait Lazy[+T <: AnyRef] { + trait Lazy[+T <: AnyRef]: def complete(using Context): T - } + + /** A tree that can have a lazy fields. + * Such fields are variables of type `T | Lazy[T]`, for some tyope `T`. + */ + trait WithLazyFields: + + /** If `x` is lazy, computes the underlying value */ + protected def force[T <: AnyRef](x: T | Lazy[T])(using Context): T = x match + case x: Lazy[T] @unchecked => x.complete + case x: T @unchecked => x + + /** Assigns all lazy fields their underlying non-lazy value. */ + def forceFields()(using Context): Unit + + end WithLazyFields // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. 
- abstract class Instance[T >: Untyped <: Type] { inst => + abstract class Instance[T <: Untyped] { inst => type Tree = Trees.Tree[T] type TypTree = Trees.TypTree[T] @@ -1357,7 +1360,7 @@ object Trees { DefDef(tree: Tree)(name, paramss, tpt, rhs) def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs)(using Context): TypeDef = TypeDef(tree: Tree)(name, rhs) - def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody)(using Context): Template = + def Template(tree: Template)(using Context)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody): Template = Template(tree: Tree)(constr, parents, derived, self, body) def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content, tpt: Tree = tree.tpt)(using Context): Hole = Hole(tree: Tree)(isTerm, idx, args, content, tpt) @@ -1372,7 +1375,7 @@ object Trees { * innermost enclosing call for which the inlined version is currently * processed. 
*/ - protected def inlineContext(call: Tree)(using Context): Context = ctx + protected def inlineContext(call: tpd.Tree)(using Context): Context = ctx /** The context to use when mapping or accumulating over a tree */ def localCtx(tree: Tree)(using Context): Context @@ -1620,8 +1623,8 @@ object Trees { inContext(localCtx(tree)) { this(x, rhs) } - case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => - this(this(this(this(x, constr), parents), self), tree.body) + case tree @ Template(constr, _, self, _) if tree.derived.isEmpty => + this(this(this(this(x, constr), tree.parents), self), tree.body) case Import(expr, _) => this(x, expr) case Export(expr, _) => @@ -1747,7 +1750,7 @@ object Trees { val denot = receiver.tpe.member(method) if !denot.exists then overload.println(i"members = ${receiver.tpe.decls}") - report.error(i"no member $receiver . $method", receiver.srcPos) + report.error(em"no member $receiver . $method", receiver.srcPos) val selected = if (denot.isOverloaded) { def typeParamCount(tp: Type) = tp.widen match { diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 52325e36037d..d1b1cdf607b5 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -47,12 +47,18 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Apply(expr, args) case _: RefTree | _: GenericApply | _: Inlined | _: Hole => ta.assignType(untpd.Apply(fn, args), fn, args) + case _ => + assert(ctx.reporter.errorsReported) + ta.assignType(untpd.Apply(fn, args), fn, args) def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match case Block(Nil, expr) => TypeApply(expr, args) case _: RefTree | _: GenericApply => ta.assignType(untpd.TypeApply(fn, args), fn, args) + case _ => + assert(ctx.reporter.errorsReported) + ta.assignType(untpd.TypeApply(fn, args), fn, args) def Literal(const: Constant)(using Context): Literal = 
ta.assignType(untpd.Literal(const)) @@ -254,12 +260,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { // If `isParamDependent == false`, the value of `previousParamRefs` is not used. if isParamDependent then mutable.ListBuffer[TermRef]() else (null: ListBuffer[TermRef] | Null).uncheckedNN - def valueParam(name: TermName, origInfo: Type): TermSymbol = + def valueParam(name: TermName, origInfo: Type, isErased: Boolean): TermSymbol = val maybeImplicit = if tp.isContextualMethod then Given else if tp.isImplicitMethod then Implicit else EmptyFlags - val maybeErased = if tp.isErasedMethod then Erased else EmptyFlags + val maybeErased = if isErased then Erased else EmptyFlags def makeSym(info: Type) = newSymbol(sym, name, TermParam | maybeImplicit | maybeErased, info, coord = sym.coord) @@ -277,7 +283,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { assert(vparams.hasSameLengthAs(tp.paramNames) && vparams.head.isTerm) (vparams.asInstanceOf[List[TermSymbol]], remaining1) case nil => - (tp.paramNames.lazyZip(tp.paramInfos).map(valueParam), Nil) + (tp.paramNames.lazyZip(tp.paramInfos).lazyZip(tp.erasedParams).map(valueParam), Nil) val (rtp, paramss) = recur(tp.instantiate(vparams.map(_.termRef)), remaining1) (rtp, vparams :: paramss) case _ => @@ -414,6 +420,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => false } + def needsIdent(tp: Type)(using Context): Boolean = tp match + case tp: TermRef => tp.prefix eq NoPrefix + case _ => false + /** A tree representing the same reference as the given type */ def ref(tp: NamedType, needLoad: Boolean = true)(using Context): Tree = if (tp.isType) TypeTree(tp) @@ -428,7 +438,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else val res = Select(TypeTree(pre), tp) if needLoad && !res.symbol.isStatic then - throw new TypeError(em"cannot establish a reference to $res") + throw TypeError(em"cannot establish a reference to $res") res def ref(sym: Symbol)(using 
Context): Tree = @@ -857,7 +867,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } /** After phase `trans`, set the owner of every definition in this tree that was formerly - * owner by `from` to `to`. + * owned by `from` to `to`. */ def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree = if (ctx.phase == trans.next) { @@ -1130,10 +1140,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def etaExpandCFT(using Context): Tree = def expand(target: Tree, tp: Type)(using Context): Tree = tp match - case defn.ContextFunctionType(argTypes, resType, isErased) => + case defn.ContextFunctionType(argTypes, resType, _) => val anonFun = newAnonFun( ctx.owner, - MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType), + MethodType.companion(isContextual = true)(argTypes, resType), coord = ctx.owner.coord) def lambdaBody(refss: List[List[Tree]]) = expand(target.select(nme.apply).appliedToArgss(refss), resType)( @@ -1144,35 +1154,38 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { expand(tree, tree.tpe.widen) } - inline val MapRecursionLimit = 10 - extension (trees: List[Tree]) - /** A map that expands to a recursive function. It's equivalent to + /** Equivalent (but faster) to * * flatten(trees.mapConserve(op)) * - * and falls back to it after `MaxRecursionLimit` recursions. - * Before that it uses a simpler method that uses stackspace - * instead of heap. - * Note `op` is duplicated in the generated code, so it should be - * kept small. + * assuming that `trees` does not contain `Thicket`s to start with. 
*/ - inline def mapInline(inline op: Tree => Tree): List[Tree] = - def recur(trees: List[Tree], count: Int): List[Tree] = - if count > MapRecursionLimit then - // use a slower implementation that avoids stack overflows - flatten(trees.mapConserve(op)) - else trees match - case tree :: rest => - val tree1 = op(tree) - val rest1 = recur(rest, count + 1) - if (tree1 eq tree) && (rest1 eq rest) then trees - else tree1 match - case Thicket(elems1) => elems1 ::: rest1 - case _ => tree1 :: rest1 - case nil => nil - recur(trees, 0) + inline def flattenedMapConserve(inline f: Tree => Tree): List[Tree] = + @tailrec + def loop(mapped: ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree]): List[Tree] = + if pending.isEmpty then + if mapped == null then unchanged + else mapped.prependToList(unchanged) + else + val head0 = pending.head + val head1 = f(head0) + + if head1 eq head0 then + loop(mapped, unchanged, pending.tail) + else + val buf = if mapped == null then new ListBuffer[Tree] else mapped + var xc = unchanged + while xc ne pending do + buf += xc.head + xc = xc.tail + head1 match + case Thicket(elems1) => buf ++= elems1 + case _ => buf += head1 + val tail0 = pending.tail + loop(buf, tail0, tail0) + loop(null, trees, trees) /** Transform statements while maintaining import contexts and expression contexts * in the same way as Typer does. 
The code addresses additional concerns: @@ -1296,7 +1309,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else if (tree.tpe.widen isRef numericCls) tree else { - report.warning(i"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") + report.warning(em"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)).withSpan(tree.span) } } @@ -1495,7 +1508,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } } - /** Creates the tuple type tree repesentation of the type trees in `ts` */ + /** Creates the tuple type tree representation of the type trees in `ts` */ def tupleTypeTree(elems: List[Tree])(using Context): Tree = { val arity = elems.length if arity <= Definitions.MaxTupleArity then @@ -1506,10 +1519,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else nestedPairsTypeTree(elems) } - /** Creates the nested pairs type tree repesentation of the type trees in `ts` */ + /** Creates the nested pairs type tree representation of the type trees in `ts` */ def nestedPairsTypeTree(ts: List[Tree])(using Context): Tree = ts.foldRight[Tree](TypeTree(defn.EmptyTupleModule.termRef))((x, acc) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), x :: acc :: Nil)) + /** Creates the nested higher-kinded pairs type tree representation of the type trees in `ts` */ + def hkNestedPairsTypeTree(ts: List[Tree])(using Context): Tree = + ts.foldRight[Tree](TypeTree(defn.QuoteMatching_KNil.typeRef))((x, acc) => AppliedTypeTree(TypeTree(defn.QuoteMatching_KCons.typeRef), x :: acc :: Nil)) + /** Replaces all positions in `tree` with zero-extent positions */ private def focusPositions(tree: Tree)(using Context): Tree = { val transformer = new tpd.TreeMap { diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 6f3f134f9342..a262c3658399 100644 --- 
a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -42,7 +42,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** mods object name impl */ case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) extends MemberDef { - type ThisTree[-T >: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) } @@ -54,7 +54,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { */ class DerivingTemplate(constr: DefDef, parentsOrDerived: List[Tree], self: ValDef, preBody: LazyTreeList, derivedCount: Int)(implicit @constructorOnly src: SourceFile) extends Template(constr, parentsOrDerived, self, preBody) { - override val parents = parentsOrDerived.dropRight(derivedCount) + private val myParents = parentsOrDerived.dropRight(derivedCount) + override def parents(using Context) = myParents override val derived = parentsOrDerived.takeRight(derivedCount) } @@ -75,9 +76,13 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { override def isType: Boolean = body.isType } - /** A function type or closure with `implicit`, `erased`, or `given` modifiers */ - class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers)(implicit @constructorOnly src: SourceFile) - extends Function(args, body) + /** A function type or closure with `implicit` or `given` modifiers and information on which parameters are `erased` */ + class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers, val erasedParams: List[Boolean])(implicit @constructorOnly src: SourceFile) + extends Function(args, body) { + assert(args.length == erasedParams.length) + + def hasErasedParams = erasedParams.contains(true) + } /** A polymorphic function type */ case class 
PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -117,6 +122,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class Into(tpt: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -145,7 +151,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case Floating } - /** {x1, ..., xN} T (only relevant under -Ycc) */ + /** {x1, ..., xN} T (only relevant under captureChecking) */ case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ @@ -217,7 +223,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) - /** Used under -Ycc to mark impure function types `A => B` in `FunctionWithMods` */ + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } @@ -414,6 +420,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Template(constr: DefDef, parents: List[Tree], derived: List[Tree], self: ValDef, body: LazyTreeList)(implicit src: SourceFile): 
Template = if (derived.isEmpty) new Template(constr, parents, self, body) else new DerivingTemplate(constr, parents ++ derived, self, body, derived.length) + def Template(constr: DefDef, parents: LazyTreeList, self: ValDef, body: LazyTreeList)(implicit src: SourceFile): Template = + new Template(constr, parents, self, body) def Import(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Import = new Import(expr, selectors) def Export(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Export = new Export(expr, selectors) def PackageDef(pid: RefTree, stats: List[Tree])(implicit src: SourceFile): PackageDef = new PackageDef(pid, stats) @@ -492,6 +500,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) def javaDotLangDot(name: Name)(implicit src: SourceFile): Select = Select(Select(Ident(nme.java), nme.lang), name) + def captureRoot(using Context): Select = + Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) @@ -646,6 +657,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def Into(tree: Tree)(tpt: Tree)(using Context): Tree = tree match + case tree: Into if tpt eq tree.tpt => tree + case _ => finalize(tree, untpd.Into(tpt)(tree.source)) def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree 
case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) @@ -715,6 +729,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) case ExtMethods(paramss, methods) => cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case Into(tpt) => + cpy.Into(tree)(transform(tpt)) case ImportSelector(imported, renamed, bound) => cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) case Number(_, _) | TypedSplice(_) => @@ -774,6 +790,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case ExtMethods(paramss, methods) => this(paramss.foldLeft(x)(apply), methods) + case Into(tpt) => + this(x, tpt) case ImportSelector(imported, renamed, bound) => this(this(this(x, imported), renamed), bound) case Number(_, _) => diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala index 0fd96fe2462c..fd89159e2076 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala @@ -39,8 +39,7 @@ case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte override def symbol(using Context) = cls - override def derivedAnnotation(tree: Tree)(using Context): Annotation = - unsupported(i"derivedAnnotation(Tree), $tree, $refs") + override def derivedAnnotation(tree: Tree)(using Context): Annotation = this def derivedAnnotation(refs: CaptureSet, boxed: Boolean)(using Context): Annotation = if (this.refs eq refs) && (this.boxed == boxed) then this diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 0ebf7c1c01e9..decd428f5365 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -9,6 
+9,7 @@ import Decorators.*, NameOps.* import config.Printers.capt import util.Property.Key import tpd.* +import config.Feature private val Captures: Key[CaptureSet] = Key() private val BoxedType: Key[BoxedTypeCache] = Key() @@ -40,6 +41,22 @@ extension (tree: Tree) tree.putAttachment(Captures, refs) refs + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Tree = + if Feature.pureFunsEnabledSomewhere then + val rbn = defn.RetainsByNameAnnot + Annotated(tree, + New(rbn.typeRef).select(rbn.primaryConstructor).appliedTo( + Typed( + SeqLiteral(ref(defn.captureRoot) :: Nil, TypeTree(defn.AnyType)), + TypeTree(defn.RepeatedParamType.appliedTo(defn.AnyType)) + ) + ) + ) + else tree + extension (tp: Type) /** @pre `tp` is a CapturingType */ @@ -96,6 +113,19 @@ extension (tp: Type) /** Is the boxedCaptureSet of this type nonempty? */ def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty + /** If this type is a capturing type, the version with boxed statues as given by `boxed`. + * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing + * type that captures the TermRef. + */ + def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match + case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => + val refs1 = tp match + case ref: CaptureRef if ref.isTracked => ref.singletonCaptureSet + case _ => refs + CapturingType(parent, refs1, boxed) + case _ => + tp + /** Map capturing type to their parents. Capturing types accessible * via dealising are also stripped. 
*/ @@ -107,22 +137,82 @@ extension (tp: Type) case _ => tp - /** Under -Ycc, map regular function type to impure function type + /** Under pureFunctions, map regular function type to impure function type */ - def adaptFunctionTypeUnderCC(using Context): Type = tp match + def adaptFunctionTypeUnderPureFuns(using Context): Type = tp match case AppliedType(fn, args) - if ctx.settings.Ycc.value && defn.isFunctionClass(fn.typeSymbol) => + if Feature.pureFunsEnabledSomewhere && defn.isFunctionClass(fn.typeSymbol) => val fname = fn.typeSymbol.name defn.FunctionType( fname.functionArity, isContextual = fname.isContextFunction, - isErased = fname.isErasedFunction, isImpure = true).appliedTo(args) case _ => tp + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Type = + if Feature.pureFunsEnabledSomewhere then + AnnotatedType(tp, + CaptureAnnotation(CaptureSet.universal, boxed = false)(defn.RetainsByNameAnnot)) + else + tp + + def isCapturingType(using Context): Boolean = + tp match + case CapturingType(_, _) => true + case _ => false + + def isEventuallyCapturingType(using Context): Boolean = + tp match + case EventuallyCapturingType(_, _) => true + case _ => false + + /** Is type known to be always pure by its class structure, + * so that adding a capture set to it would not make sense? 
+ */ + def isAlwaysPure(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.isPureClass + else tp.superType.isAlwaysPure + case CapturingType(parent, refs) => + parent.isAlwaysPure || refs.isAlwaysEmpty + case tp: TypeProxy => + tp.superType.isAlwaysPure + case tp: AndType => + tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure + case tp: OrType => + tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure + case _ => + false + +extension (cls: ClassSymbol) + + def pureBaseClass(using Context): Option[Symbol] = + cls.baseClasses.find(bc => + defn.pureBaseClasses.contains(bc) + || { + val selfType = bc.givenSelfType + selfType.exists && selfType.captureSet.isAlwaysEmpty + }) + extension (sym: Symbol) + /** A class is pure if: + * - one its base types has an explicitly declared self type with an empty capture set + * - or it is a value class + * - or it is an exception + * - or it is one of Nothing, Null, or String + */ + def isPureClass(using Context): Boolean = sym match + case cls: ClassSymbol => + cls.pureBaseClass.isDefined || defn.pureSimpleClasses.contains(cls) + case _ => + false + /** Does this symbol allow results carrying the universal capability? * Currently this is true only for function type applies (since their * results are unboxed) and `erasedValue` since this function is magic in @@ -150,6 +240,8 @@ extension (sym: Symbol) case _ => false containsEnclTypeParam(sym.info.finalResultType) && !sym.allowsRootCapture + && sym != defn.Caps_unsafeBox + && sym != defn.Caps_unsafeUnbox extension (tp: AnnotatedType) /** Is this a boxed capturing type? 
*/ diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index d3e32ac538a4..2b9fe9d3d923 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -222,7 +222,7 @@ sealed abstract class CaptureSet extends Showable: /** The largest subset (via <:<) of this capture set that only contains elements * for which `p` is true. */ - def filter(p: CaptureRef => Boolean)(using Context): CaptureSet = + def filter(p: Context ?=> CaptureRef => Boolean)(using Context): CaptureSet = if this.isConst then val elems1 = elems.filter(p) if elems1 == elems then this @@ -271,7 +271,7 @@ sealed abstract class CaptureSet extends Showable: map(Substituters.SubstParamsMap(tl, to)) /** Invoke handler if this set has (or later aquires) the root capability `*` */ - def disallowRootCapability(handler: () => Unit)(using Context): this.type = + def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = if isUniversal then handler() this @@ -373,7 +373,7 @@ object CaptureSet: def isAlwaysEmpty = false /** A handler to be invoked if the root reference `*` is added to this set */ - var addRootHandler: () => Unit = () => () + var rootAddedHandler: () => Context ?=> Unit = () => () var description: String = "" @@ -404,7 +404,7 @@ object CaptureSet: def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = if !isConst && recordElemsState() then elems ++= newElems - if isUniversal then addRootHandler() + if isUniversal then rootAddedHandler() // assert(id != 2 || elems.size != 2, this) (CompareResult.OK /: deps) { (r, dep) => r.andAlso(dep.tryInclude(newElems, this)) @@ -421,8 +421,8 @@ object CaptureSet: else CompareResult.fail(this) - override def disallowRootCapability(handler: () => Unit)(using Context): this.type = - addRootHandler = handler + override def disallowRootCapability(handler: () => Context ?=> 
Unit)(using Context): this.type = + rootAddedHandler = handler super.disallowRootCapability(handler) private var computingApprox = false @@ -546,7 +546,7 @@ object CaptureSet: else CompareResult.fail(this) } .andAlso { - if (origin ne source) && mapIsIdempotent then + if (origin ne source) && (origin ne initial) && mapIsIdempotent then // `tm` is idempotent, propagate back elems from image set. // This is sound, since we know that for `r in newElems: tm(r) = r`, hence // `r` is _one_ possible solution in `source` that would make an `r` appear in this set. @@ -559,7 +559,7 @@ object CaptureSet: // elements from variable sources in contra- and non-variant positions. In essence, // we approximate types resulting from such maps by returning a possible super type // from the actual type. But this is neither sound nor complete. - report.warning(i"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") + report.warning(em"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") CompareResult.fail(this) else CompareResult.OK @@ -613,7 +613,7 @@ object CaptureSet: /** A variable with elements given at any time as { x <- source.elems | p(x) } */ class Filtered private[CaptureSet] - (val source: Var, p: CaptureRef => Boolean)(using @constructorOnly ctx: Context) + (val source: Var, p: Context ?=> CaptureRef => Boolean)(using @constructorOnly ctx: Context) extends DerivedVar(source.elems.filter(p)): override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = diff --git a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala index 05e813793a63..a7c283f4cc3b 100644 --- a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala +++ b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala @@ -41,10 +41,23 @@ object CapturingType: * returned separately by CaptureOps.isBoxed. 
*/ def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = - if ctx.phase == Phases.checkCapturesPhase && tp.annot.symbol == defn.RetainsAnnot then + if ctx.phase == Phases.checkCapturesPhase + && tp.annot.symbol == defn.RetainsAnnot + && !ctx.mode.is(Mode.IgnoreCaptures) + then EventuallyCapturingType.unapply(tp) else None + /** Check whether a type is uncachable when computing `baseType`. + * - Avoid caching all the types during the setup phase, since at that point + * the capture set variables are not fully installed yet. + * - Avoid caching capturing types when IgnoreCaptures mode is set, since the + * capture sets may be thrown away in the computed base type. + */ + def isUncachable(tp: Type)(using Context): Boolean = + ctx.phase == Phases.checkCapturesPhase && + (Setup.isDuringSetup || ctx.mode.is(Mode.IgnoreCaptures) && tp.isEventuallyCapturingType) + end CapturingType /** An extractor for types that will be capturing types at phase CheckCaptures. Also diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index fe22f9f49e13..f9401a0c509f 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -5,12 +5,13 @@ package cc import core.* import Phases.*, DenotTransformers.*, SymDenotations.* import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* -import Types.*, StdNames.* +import Types.*, StdNames.*, Denotations.* import config.Printers.{capt, recheckr} -import config.Config +import config.{Config, Feature} import ast.{tpd, untpd, Trees} import Trees.* -import typer.RefChecks.{checkAllOverrides, checkParents} +import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPairsChecker} +import typer.Checking.{checkBounds, checkAppliedTypesIn} import util.{SimpleIdentitySet, EqHashMap, SrcPos} import transform.SymUtils.* import transform.{Recheck, PreRecheck} @@ -18,6 +19,7 @@ import 
Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} import StdNames.nme +import NameKinds.DefaultGetterName import reporting.trace /** The capture checker */ @@ -26,7 +28,7 @@ object CheckCaptures: class Pre extends PreRecheck, SymTransformer: - override def isEnabled(using Context) = ctx.settings.Ycc.value + override def isEnabled(using Context) = true /** Reset `private` flags of parameter accessors so that we can refine them * in Setup if they have non-empty capture sets. Special handling of some @@ -42,12 +44,20 @@ object CheckCaptures: end Pre /** A class describing environments. - * @param owner the current owner - * @param captured the caputure set containing all references to tracked free variables outside of boxes - * @param isBoxed true if the environment is inside a box (in which case references are not counted) - * @param outer0 the next enclosing environment + * @param owner the current owner + * @param nestedInOwner true if the environment is a temporary one nested in the owner's environment, + * and does not have a different actual owner symbol (this happens when doing box adaptation). 
+ * @param captured the caputure set containing all references to tracked free variables outside of boxes + * @param isBoxed true if the environment is inside a box (in which case references are not counted) + * @param outer0 the next enclosing environment */ - case class Env(owner: Symbol, captured: CaptureSet, isBoxed: Boolean, outer0: Env | Null): + case class Env( + owner: Symbol, + nestedInOwner: Boolean, + captured: CaptureSet, + isBoxed: Boolean, + outer0: Env | Null + ): def outer = outer0.nn def isOutermost = outer0 == null @@ -81,7 +91,7 @@ object CheckCaptures: elem.tpe match case ref: CaptureRef => if !ref.canBeTracked then - report.error(em"$elem cannot be tracked since it is not a parameter or a local variable", elem.srcPos) + report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) case tpe => report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) @@ -125,30 +135,18 @@ class CheckCaptures extends Recheck, SymTransformer: import CheckCaptures.* def phaseName: String = "cc" - override def isEnabled(using Context) = ctx.settings.Ycc.value + override def isEnabled(using Context) = true def newRechecker()(using Context) = CaptureChecker(ctx) override def run(using Context): Unit = - checkOverrides.traverse(ctx.compilationUnit.tpdTree) - super.run + if Feature.ccEnabled then + super.run override def transformSym(sym: SymDenotation)(using Context): SymDenotation = if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) else super.transformSym(sym) - /** Check overrides again, taking capture sets into account. - * TODO: Can we avoid doing overrides checks twice? - * We need to do them here since only at this phase CaptureTypes are relevant - * But maybe we can then elide the check during the RefChecks phase if -Ycc is set? 
- */ - def checkOverrides = new TreeTraverser: - def traverse(t: Tree)(using Context) = - t match - case t: Template => checkAllOverrides(ctx.owner.asClass) - case _ => - traverseChildren(t) - class CaptureChecker(ictx: Context) extends Rechecker(ictx): import ast.tpd.* @@ -192,7 +190,7 @@ class CheckCaptures extends Recheck, SymTransformer: def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos)(using Context) = val res = elem.singletonCaptureSet.subCaptures(cs, frozen = false) if !res.isOK then - report.error(i"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) + report.error(em"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) /** Check subcapturing `cs1 <: cs2`, report error on failure */ def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos)(using Context) = @@ -201,10 +199,10 @@ class CheckCaptures extends Recheck, SymTransformer: def header = if cs1.elems.size == 1 then i"reference ${cs1.elems.toList}%, % is not" else i"references $cs1 are not all" - report.error(i"$header included in allowed capture set ${res.blocking}", pos) + report.error(em"$header included in allowed capture set ${res.blocking}", pos) /** The current environment */ - private var curEnv: Env = Env(NoSymbol, CaptureSet.empty, isBoxed = false, null) + private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null) private val myCapturedVars: util.EqHashMap[Symbol, CaptureSet] = EqHashMap() @@ -249,8 +247,12 @@ class CheckCaptures extends Recheck, SymTransformer: if !cs.isAlwaysEmpty then forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env => val included = cs.filter { - case ref: TermRef => env.owner.isProperlyContainedIn(ref.symbol.owner) - case ref: ThisType => env.owner.isProperlyContainedIn(ref.cls) + case ref: TermRef => + (env.nestedInOwner || env.owner != ref.symbol.owner) + && env.owner.isContainedIn(ref.symbol.owner) + case ref: 
ThisType => + (env.nestedInOwner || env.owner != ref.cls) + && env.owner.isContainedIn(ref.cls) case _ => false } capt.println(i"Include call capture $included in ${env.owner}") @@ -276,16 +278,34 @@ class CheckCaptures extends Recheck, SymTransformer: * outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr * and Cr otherwise. */ - override def recheckSelection(tree: Select, qualType: Type, name: Name)(using Context) = { - val selType = super.recheckSelection(tree, qualType, name) + override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = { + def disambiguate(denot: Denotation): Denotation = denot match + case MultiDenotation(denot1, denot2) => + // This case can arise when we try to merge multiple types that have different + // capture sets on some part. For instance an asSeenFrom might produce + // a bi-mapped capture set arising from a substition. Applying the same substitution + // to the same type twice will nevertheless produce different capture setsw which can + // lead to a failure in disambiguation since neither alternative is better than the + // other in a frozen constraint. An example test case is disambiguate-select.scala. + // We address the problem by disambiguating while ignoring all capture sets as a fallback. 
+ withMode(Mode.IgnoreCaptures) { + disambiguate(denot1).meet(disambiguate(denot2), qualType) + } + case _ => denot + + val selType = recheckSelection(tree, qualType, name, disambiguate) val selCs = selType.widen.captureSet if selCs.isAlwaysEmpty || selType.widen.isBoxedCapturing || qualType.isBoxedCapturing then selType else val qualCs = qualType.captureSet capt.println(i"intersect $qualType, ${selType.widen}, $qualCs, $selCs in $tree") - if qualCs.mightSubcapture(selCs) then + if qualCs.mightSubcapture(selCs) + && !selCs.mightSubcapture(qualCs) + && !pt.stripCapturing.isInstanceOf[SingletonType] + then selType.widen.stripCapturing.capturing(qualCs) + .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) else selType }//.showing(i"recheck sel $tree, $qualType = $result") @@ -302,23 +322,41 @@ class CheckCaptures extends Recheck, SymTransformer: * and Cr otherwise. */ override def recheckApply(tree: Apply, pt: Type)(using Context): Type = - includeCallCaptures(tree.symbol, tree.srcPos) - super.recheckApply(tree, pt) match - case appType @ CapturingType(appType1, refs) => - tree.fun match - case Select(qual, _) - if !tree.fun.symbol.isConstructor - && !qual.tpe.isBoxedCapturing - && !tree.args.exists(_.tpe.isBoxedCapturing) - && qual.tpe.captureSet.mightSubcapture(refs) - && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) - => - val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => - cs ++ arg.tpe.captureSet) - appType.derivedCapturingType(appType1, callCaptures) - .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) - case _ => appType - case appType => appType + val meth = tree.fun.symbol + includeCallCaptures(meth, tree.srcPos) + def mapArgUsing(f: Type => Type) = + val arg :: Nil = tree.args: @unchecked + val argType0 = f(recheckStart(arg, pt)) + val argType = super.recheckFinish(argType0, arg, pt) + super.recheckFinish(argType, tree, pt) + + if meth == 
defn.Caps_unsafeBox then + mapArgUsing(_.forceBoxStatus(true)) + else if meth == defn.Caps_unsafeUnbox then + mapArgUsing(_.forceBoxStatus(false)) + else if meth == defn.Caps_unsafeBoxFunArg then + mapArgUsing { + case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual) => + defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual) + } + else + super.recheckApply(tree, pt) match + case appType @ CapturingType(appType1, refs) => + tree.fun match + case Select(qual, _) + if !tree.fun.symbol.isConstructor + && !qual.tpe.isBoxedCapturing + && !tree.args.exists(_.tpe.isBoxedCapturing) + && qual.tpe.captureSet.mightSubcapture(refs) + && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) + => + val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => + cs ++ arg.tpe.captureSet) + appType.derivedCapturingType(appType1, callCaptures) + .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) + case _ => appType + case appType => appType + end recheckApply /** Handle an application of method `sym` with type `mt` to arguments of types `argTypes`. * This means: @@ -392,7 +430,8 @@ class CheckCaptures extends Recheck, SymTransformer: block match case closureDef(mdef) => pt.dealias match - case defn.FunctionOf(ptformals, _, _, _) if ptformals.forall(_.captureSet.isAlwaysEmpty) => + case defn.FunctionOf(ptformals, _, _) + if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => // Redo setup of the anonymous function so that formal parameters don't // get capture sets. This is important to avoid false widenings to `*` // when taking the base type of the actual closures's dependent function @@ -402,9 +441,10 @@ class CheckCaptures extends Recheck, SymTransformer: // First, undo the previous setup which installed a completer for `meth`. 
atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) .installAfter(preRecheckPhase) + // Next, update all parameter symbols to match expected formals meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => - psym.copySymDenotation(info = pformal).installAfter(preRecheckPhase) + psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) } // Next, update types of parameter ValDefs mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => @@ -412,13 +452,13 @@ class CheckCaptures extends Recheck, SymTransformer: tpt.rememberTypeAlways(pformal) } // Next, install a new completer reflecting the new parameters for the anonymous method + val mt = meth.info.asInstanceOf[MethodType] val completer = new LazyType: def complete(denot: SymDenotation)(using Context) = - denot.info = MethodType(ptformals, mdef.tpt.knownType) + denot.info = mt.companion(ptformals, mdef.tpt.knownType) .showing(i"simplify info of $meth to $result", capt) recheckDef(mdef, meth) - meth.copySymDenotation(info = completer, initFlags = meth.flags &~ Touched) - .installAfter(preRecheckPhase) + meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) case _ => case _ => super.recheckBlock(block, pt) @@ -439,7 +479,7 @@ class CheckCaptures extends Recheck, SymTransformer: if !Synthetics.isExcluded(sym) then val saved = curEnv val localSet = capturedVars(sym) - if !localSet.isAlwaysEmpty then curEnv = Env(sym, localSet, isBoxed = false, curEnv) + if !localSet.isAlwaysEmpty then curEnv = Env(sym, nestedInOwner = false, localSet, isBoxed = false, curEnv) try super.recheckDefDef(tree, sym) finally interpolateVarsIn(tree.tpt) @@ -448,19 +488,25 @@ class CheckCaptures extends Recheck, SymTransformer: /** Class-specific capture set relations: * 1. The capture set of a class includes the capture sets of its parents. * 2. The capture set of the self type of a class includes the capture set of the class. - * 3. 
The capture set of the self type of a class includes the capture set of every class parameter. + * 3. The capture set of the self type of a class includes the capture set of every class parameter, + * unless the parameter is marked @constructorOnly. */ override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = val saved = curEnv val localSet = capturedVars(cls) for parent <- impl.parents do // (1) checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos) - if !localSet.isAlwaysEmpty then curEnv = Env(cls, localSet, isBoxed = false, curEnv) + if !localSet.isAlwaysEmpty then curEnv = Env(cls, nestedInOwner = false, localSet, isBoxed = false, curEnv) try val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") checkSubset(localSet, thisSet, tree.srcPos) // (2) for param <- cls.paramGetters do - checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + for pureBase <- cls.pureBaseClass do + checkSubset(thisSet, + CaptureSet.empty.withDescription(i"of pure base class $pureBase"), + tree.srcPos) super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -495,14 +541,20 @@ class CheckCaptures extends Recheck, SymTransformer: recheckFinish(result, arg, pt) */ - /** If expected type `pt` is boxed, don't propagate free variables. + /** If expected type `pt` is boxed and the tree is a function or a reference, + * don't propagate free variables. * Otherwise, if the result type is boxed, simulate an unboxing by * adding all references in the boxed capture set to the current environment. 
*/ override def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = if tree.isTerm && pt.isBoxedCapturing then val saved = curEnv - curEnv = Env(curEnv.owner, CaptureSet.Var(), isBoxed = true, curEnv) + + tree match + case _: RefTree | closureDef(_) => + curEnv = Env(curEnv.owner, nestedInOwner = false, CaptureSet.Var(), isBoxed = true, curEnv) + case _ => + try super.recheck(tree, pt) finally curEnv = saved else @@ -523,8 +575,6 @@ class CheckCaptures extends Recheck, SymTransformer: tpe case _: Try => tpe - case _: ValDef if tree.symbol.is(Mutable) => - tree.symbol.info case _ => NoType def checkNotUniversal(tp: Type): Unit = tp.widenDealias match @@ -543,11 +593,28 @@ class CheckCaptures extends Recheck, SymTransformer: /** Massage `actual` and `expected` types using the methods below before checking conformance */ override def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = - val expected1 = addOuterRefs(expected, actual) + val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) val actual1 = adaptBoxed(actual, expected1, tree.srcPos) //println(i"check conforms $actual1 <<< $expected1") super.checkConformsExpr(actual1, expected1, tree) + private def toDepFun(args: List[Type], resultType: Type, isContextual: Boolean)(using Context): Type = + MethodType.companion(isContextual = isContextual)(args, resultType) + .toFunctionType(isJava = false, alwaysDependent = true) + + /** Turn `expected` into a dependent function when `actual` is dependent. 
*/ + private def alignDependentFunction(expected: Type, actual: Type)(using Context): Type = + def recur(expected: Type): Type = expected.dealias match + case expected @ CapturingType(eparent, refs) => + CapturingType(recur(eparent), refs, boxed = expected.isBoxed) + case expected @ defn.FunctionOf(args, resultType, isContextual) + if defn.isNonRefinedFunction(expected) && defn.isFunctionType(actual) && !defn.isNonRefinedFunction(actual) => + val expected1 = toDepFun(args, resultType, isContextual) + expected1 + case _ => + expected + recur(expected) + /** For the expected type, implement the rule outlined in #14390: * - when checking an expression `a: Ca Ta` against an expected type `Ce Te`, * - where the capture set `Ce` contains Cls.this, @@ -588,36 +655,128 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => expected - /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions */ - def adaptBoxed(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = + /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions + * + * @param alwaysConst always make capture set variables constant after adaptation + */ + def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, alwaysConst: Boolean = false)(using Context): Type = /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) * to `expected` type. + * It returns the adapted type along with the additionally captured variable + * during adaptation. 
* @param reconstruct how to rebuild the adapted function type */ def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, - covariant: Boolean, - reconstruct: (List[Type], Type) => Type): Type = - val (eargs, eres) = expected.dealias match - case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) - case _ => (aargs.map(_ => WildcardType), WildcardType) - val aargs1 = aargs.zipWithConserve(eargs)(adapt(_, _, !covariant)) - val ares1 = adapt(ares, eres, covariant) - if (ares1 eq ares) && (aargs1 eq aargs) then actual - else reconstruct(aargs1, ares1) - - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = actual.dealias match - case actual @ CapturingType(parent, refs) => - val parent1 = adapt(parent, expected, covariant) - if actual.isBoxed != expected.isBoxedCapturing then + covariant: Boolean, boxed: Boolean, + reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val (eargs, eres) = expected.dealias.stripCapturing match + case defn.FunctionOf(eargs, eres, _) => (eargs, eres) + case expected: MethodType => (expected.paramInfos, expected.resType) + case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) + case _ => (aargs.map(_ => WildcardType), WildcardType) + val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if (ares1 eq ares) && (aargs1 eq aargs) then actual + else reconstruct(aargs1, ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + + /** Adapt type function type `actual` to the expected type. 
+ * @see [[adaptFun]] + */ + def adaptTypeFun( + actual: Type, ares: Type, expected: Type, + covariant: Boolean, boxed: Boolean, + reconstruct: Type => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val eres = expected.dealias.stripCapturing match + case RefinedType(_, _, rinfo: PolyType) => rinfo.resType + case expected: PolyType => expected.resType + case _ => WildcardType + + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if ares1 eq ares then actual + else reconstruct(ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + end adaptTypeFun + + def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = + val arrow = if covariant then "~~>" else "<~~" + i"adapting $actual $arrow $expected" + + def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { + if expected.isInstanceOf[WildcardType] then actual + else + // Decompose the actual type into the inner shape type, the capture set and the box status + val styp = if actual.isFromJavaObject then actual else actual.stripCapturing + val cs = actual.captureSet + val boxed = actual.isBoxedCapturing + + // A box/unbox should be inserted, if the actual box status mismatches with the expectation + val needsAdaptation = boxed != expected.isBoxedCapturing + // Whether to insert a box or an unbox? 
+ val insertBox = needsAdaptation && covariant != boxed + + // Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation + val (styp1, leaked) = styp match { + case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => + adaptFun(actual, args.init, args.last, expected, covariant, insertBox, + (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) + case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionOrPolyType(actual) => + // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) + adaptFun(actual, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, + (aargs1, ares1) => + rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) + .toFunctionType(isJava = false, alwaysDependent = true)) + case actual: MethodType => + adaptFun(actual, actual.paramInfos, actual.resType, expected, covariant, insertBox, + (aargs1, ares1) => + actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) + case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => + adaptTypeFun(actual, rinfo.resType, expected, covariant, insertBox, + ares1 => + val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) + val actual1 = actual.derivedRefinedType(p, nme, rinfo1) + actual1 + ) + case _ => + (styp, CaptureSet()) + } + + // Capture set of the term after adaptation + val cs1 = cs ++ leaked + + // Compute the adapted type + def adaptedType(resultBoxed: Boolean) = + styp1.capturing(if alwaysConst then CaptureSet(cs1.elems) else cs1).forceBoxStatus(resultBoxed) + + if needsAdaptation then val criticalSet = // the set which is not allowed to have `*` - if covariant then refs // can't box with `*` + if covariant then cs1 // can't box with `*` else expected.captureSet // can't unbox with `*` - if criticalSet.isUniversal then + if criticalSet.isUniversal && expected.isValueType then // We can't 
box/unbox the universal capability. Leave `actual` as it is // so we get an error in checkConforms. This tends to give better error // messages than disallowing the root capability in `criticalSet`. - capt.println(i"cannot box/unbox $actual vs $expected") + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") actual else // Disallow future addition of `*` to `criticalSet`. @@ -627,20 +786,12 @@ class CheckCaptures extends Recheck, SymTransformer: |since one of their capture sets contains the root capability `*`""", pos) } - if covariant == actual.isBoxed then markFree(refs, pos) - CapturingType(parent1, refs, boxed = !actual.isBoxed) + if !insertBox then // unboxing + markFree(criticalSet, pos) + adaptedType(!boxed) else - actual.derivedCapturingType(parent1, refs) - case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => - adaptFun(actual, args.init, args.last, expected, covariant, - (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => - // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) - adaptFun(actual, rinfo.paramInfos, rinfo.resType, expected, covariant, - (aargs1, ares1) => - rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) - .toFunctionType(isJava = false, alwaysDependent = true)) - case _ => actual + adaptedType(boxed) + } var actualw = actual.widenDealias actual match @@ -658,11 +809,49 @@ class CheckCaptures extends Recheck, SymTransformer: else actual end adaptBoxed + /** Check overrides again, taking capture sets into account. + * TODO: Can we avoid doing overrides checks twice? + * We need to do them here since only at this phase CaptureTypes are relevant + * But maybe we can then elide the check during the RefChecks phase under captureChecking? 
+ */ + def checkOverrides = new TreeTraverser: + class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type, srcPos: SrcPos)(using Context) extends OverridingPairsChecker(clazz, self) { + /** Check subtype with box adaptation. + * This function is passed to RefChecks to check the compatibility of overriding pairs. + * @param sym symbol of the field definition that is being checked + */ + override def checkSubType(actual: Type, expected: Type)(using Context): Boolean = + val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) + val actual1 = + val saved = curEnv + try + curEnv = Env(clazz, nestedInOwner = true, capturedVars(clazz), isBoxed = false, outer0 = curEnv) + val adapted = adaptBoxed(actual, expected1, srcPos, alwaysConst = true) + actual match + case _: MethodType => + // We remove the capture set resulted from box adaptation for method types, + // since class methods are always treated as pure, and their captured variables + // are charged to the capture set of the class (which is already done during + // box adaptation). 
+ adapted.stripCapturing + case _ => adapted + finally curEnv = saved + actual1 frozen_<:< expected1 + } + + def traverse(t: Tree)(using Context) = + t match + case t: Template => + checkAllOverrides(ctx.owner.asClass, OverridingPairsCheckerCC(_, _, t)) + case _ => + traverseChildren(t) + override def checkUnit(unit: CompilationUnit)(using Context): Unit = - Setup(preRecheckPhase, thisPhase, recheckDef) - .traverse(ctx.compilationUnit.tpdTree) + Setup(preRecheckPhase, thisPhase, recheckDef)(ctx.compilationUnit.tpdTree) + //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") withCaptureSetsExplained { super.checkUnit(unit) + checkOverrides.traverse(unit.tpdTree) checkSelfTypes(unit.tpdTree) postCheck(unit.tpdTree) if ctx.settings.YccDebug.value then @@ -697,28 +886,104 @@ class CheckCaptures extends Recheck, SymTransformer: cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) } assert(roots.nonEmpty) - for root <- roots do - checkParents(root, parentTrees(root)) + for case root: ClassSymbol <- roots do + checkSelfAgainstParents(root, root.baseClasses) val selfType = root.asClass.classInfo.selfType interpolator(startingVariance = -1).traverse(selfType) if !root.isEffectivelySealed then + def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = + cls.baseClasses.tail.exists { psym => + val selfType = psym.asClass.givenSelfType + selfType.exists && selfType.captureSet.elems == refs.elems + } selfType match - case CapturingType(_, refs: CaptureSet.Var) if !refs.isUniversal => + case CapturingType(_, refs: CaptureSet.Var) + if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => + // Forbid inferred self types unless they are already implied by an explicit + // self type in a parent. 
report.error( - i"""$root needs an explicitly declared self type since its - |inferred self type $selfType - |is not visible in other compilation units that define subclasses.""", + em"""$root needs an explicitly declared self type since its + |inferred self type $selfType + |is not visible in other compilation units that define subclasses.""", root.srcPos) case _ => parentTrees -= root capt.println(i"checked $root with $selfType") end checkSelfTypes + /** Heal ill-formed capture sets in the type parameter. + * + * We can push parameter refs into a capture set in type parameters + * that this type parameter can't see. + * For example, when capture checking the following expression: + * + * def usingLogFile[T](op: (f: {*} File) => T): T = ... + * + * usingLogFile[box ?1 () -> Unit] { (f: {*} File) => () => { f.write(0) } } + * + * We may propagate `f` into ?1, making ?1 ill-formed. + * This also causes soundness issues, since `f` in ?1 should be widened to `*`, + * giving rise to an error that `*` cannot be included in a boxed capture set. + * + * To solve this, we still allow ?1 to capture parameter refs like `f`, but + * compensate this by pushing the widened capture set of `f` into ?1. + * This solves the soundness issue caused by the ill-formness of ?1. + */ + private def healTypeParam(tree: Tree)(using Context): Unit = + val checker = new TypeTraverser: + private def isAllowed(ref: CaptureRef): Boolean = ref match + case ref: TermParamRef => allowed.contains(ref) + case _ => true + + // Widen the given term parameter refs x₁ : C₁ S₁ , ⋯ , xₙ : Cₙ Sₙ to their capture sets C₁ , ⋯ , Cₙ. + // + // If in these capture sets there are any capture references that are term parameter references we should avoid, + // we will widen them recursively. 
+ private def widenParamRefs(refs: List[TermParamRef]): List[CaptureSet] = + @scala.annotation.tailrec + def recur(todos: List[TermParamRef], acc: List[CaptureSet]): List[CaptureSet] = + todos match + case Nil => acc + case ref :: rem => + val cs = ref.captureSetOfInfo + val nextAcc = cs.filter(isAllowed(_)) :: acc + val nextRem: List[TermParamRef] = (cs.elems.toList.filter(!isAllowed(_)) ++ rem).asInstanceOf + recur(nextRem, nextAcc) + recur(refs, Nil) + + private def healCaptureSet(cs: CaptureSet): Unit = + val toInclude = widenParamRefs(cs.elems.toList.filter(!isAllowed(_)).asInstanceOf) + toInclude.foreach(checkSubset(_, cs, tree.srcPos)) + + private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty + + def traverse(tp: Type) = + tp match + case CapturingType(parent, refs) => + healCaptureSet(refs) + traverse(parent) + case tp @ RefinedType(parent, rname, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => + traverse(rinfo) + case tp: TermLambda => + val saved = allowed + try + tp.paramRefs.foreach(allowed += _) + traverseChildren(tp) + finally allowed = saved + case _ => + traverseChildren(tp) + + if tree.isInstanceOf[InferredTypeTree] then + checker.traverse(tree.knownType) + end healTypeParam + /** Perform the following kinds of checks * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that externally visible `val`s or `def`s have empty capture sets. If not, * suggest an explicit type. This is so that separate compilation (where external * symbols have empty capture sets) gives the same results as joint compilation. + * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. + * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. 
*/ def postCheck(unit: tpd.Tree)(using Context): Unit = unit.foreachSubTree { @@ -737,25 +1002,55 @@ class CheckCaptures extends Recheck, SymTransformer: val isLocal = sym.owner.ownersIterator.exists(_.isTerm) || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) - - // The following classes of definitions need explicit capture types ... - if !isLocal // ... since external capture types are not inferred - || sym.owner.is(Trait) // ... since we do OverridingPairs checking before capture inference - || sym.allOverriddenSymbols.nonEmpty // ... since we do override checking before capture inference - then + def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly + sym.is(Private) // private symbols can always have inferred types + || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be + // too annoying. This is a hole since a defualt getter's result type + // might leak into a type variable. + || // non-local symbols cannot have inferred types since external capture types are not inferred + isLocal // local symbols still need explicit types if + && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference + def isNotPureThis(ref: CaptureRef) = ref match { + case ref: ThisType => !ref.cls.isPureClass + case _ => true + } + if !canUseInferred then val inferred = t.tpt.knownType def checkPure(tp: Type) = tp match - case CapturingType(_, refs) if !refs.elems.isEmpty => + case CapturingType(_, refs) + if !refs.elems.filter(isNotPureThis).isEmpty => val resultStr = if t.isInstanceOf[DefDef] then " result" else "" report.error( em"""Non-local $sym cannot have an inferred$resultStr type |$inferred |with non-empty capture set $refs. 
- |The type needs to be declared explicitly.""", t.srcPos) + |The type needs to be declared explicitly.""".withoutDisambiguation(), + t.srcPos) case _ => inferred.foreachPart(checkPure, StopAt.Static) + case t @ TypeApply(fun, args) => + fun.knownType.widen match + case tl: PolyType => + val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => + arg.withType(arg.knownType.forceBoxStatus( + bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) + } + checkBounds(normArgs, tl) + case _ => + + args.foreach(healTypeParam(_)) case _ => } - + if !ctx.reporter.errorsReported then + // We dont report errors here if previous errors were reported, because other + // errors often result in bad applied types, but flagging these bad types gives + // often worse error messages than the original errors. + val checkApplied = new TreeTraverser: + def traverse(t: Tree)(using Context) = t match + case tree: InferredTypeTree => + case tree: New => + case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) + case _ => traverseChildren(t) + checkApplied.traverse(unit) end CaptureChecker end CheckCaptures diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index a3e88699e424..fc16422e1373 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -11,6 +11,8 @@ import ast.tpd import transform.Recheck.* import CaptureSet.IdentityCaptRefMap import Synthetics.isExcluded +import util.Property +import dotty.tools.dotc.core.Annotations.Annotation /** A tree traverser that prepares a compilation unit to be capture checked. 
* It does the following: @@ -37,7 +39,6 @@ extends tpd.TreeTraverser: private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = MethodType.companion( isContextual = defn.isContextFunctionClass(tycon.classSymbol), - isErased = defn.isErasedFunctionClass(tycon.classSymbol) )(argTypes, resType) .toFunctionType(isJava = false, alwaysDependent = true) @@ -53,7 +54,7 @@ extends tpd.TreeTraverser: val boxedRes = recur(res) if boxedRes eq res then tp else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) - case tp1 @ RefinedType(_, _, rinfo) if defn.isFunctionType(tp1) => + case tp1 @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionOrPolyType(tp1) => val boxedRinfo = recur(rinfo) if boxedRinfo eq rinfo then tp else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) @@ -98,7 +99,10 @@ extends tpd.TreeTraverser: def addCaptureRefinements(tp: Type): Type = tp match case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => tp.typeSymbol match - case cls: ClassSymbol if !defn.isFunctionClass(cls) => + case cls: ClassSymbol + if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => + // We assume that Java classes can refer to capturing Scala types only indirectly, + // using type parameters. Hence, no need to refine them. cls.paramGetters.foldLeft(tp) { (core, getter) => if getter.termRef.isTracked then val getterType = tp.memberInfo(getter).strippedDealias @@ -117,14 +121,14 @@ extends tpd.TreeTraverser: case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol if sym.isClass then - tp.typeSymbol == defn.AnyClass + sym == defn.AnyClass // we assume Any is a shorthand of {*} Any, so if Any is an upper // bound, the type is taken to be impure. 
else superTypeIsImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => superTypeIsImpure(tp.underlying) case tp: AndType => - superTypeIsImpure(tp.tp1) || canHaveInferredCapture(tp.tp2) + superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) case tp: OrType => superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) case _ => @@ -132,23 +136,26 @@ extends tpd.TreeTraverser: }.showing(i"super type is impure $tp = $result", capt) /** Should a capture set variable be added on type `tp`? */ - def canHaveInferredCapture(tp: Type): Boolean = { + def needsVariable(tp: Type): Boolean = { tp.typeParams.isEmpty && tp.match case tp: (TypeRef | AppliedType) => val tp1 = tp.dealias - if tp1 ne tp then canHaveInferredCapture(tp1) + if tp1 ne tp then needsVariable(tp1) else val sym = tp1.typeSymbol - if sym.isClass then !sym.isValueClass && sym != defn.AnyClass + if sym.isClass then + !sym.isPureClass && sym != defn.AnyClass else superTypeIsImpure(tp1) case tp: (RefinedOrRecType | MatchType) => - canHaveInferredCapture(tp.underlying) + needsVariable(tp.underlying) case tp: AndType => - canHaveInferredCapture(tp.tp1) && canHaveInferredCapture(tp.tp2) + needsVariable(tp.tp1) && needsVariable(tp.tp2) case tp: OrType => - canHaveInferredCapture(tp.tp1) || canHaveInferredCapture(tp.tp2) - case CapturingType(_, refs) => - refs.isConst && !refs.isUniversal + needsVariable(tp.tp1) || needsVariable(tp.tp2) + case CapturingType(parent, refs) => + needsVariable(parent) + && refs.isConst // if refs is a variable, no need to add another + && !refs.isUniversal // if refs is {*}, an added variable would not change anything case _ => false }.showing(i"can have inferred capture $tp = $result", capt) @@ -181,7 +188,7 @@ extends tpd.TreeTraverser: CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) - case _ if canHaveInferredCapture(tp) => + case _ if 
needsVariable(tp) => val cs = tp.dealias match case CapturingType(_, refs) => CaptureSet.Var(refs.elems) case _ => CaptureSet.Var() @@ -206,20 +213,25 @@ extends tpd.TreeTraverser: val tycon1 = this(tycon) if defn.isNonRefinedFunction(tp) then // Convert toplevel generic function types to dependent functions - val args0 = args.init - var res0 = args.last - val args1 = mapNested(args0) - val res1 = this(res0) - if isTopLevel then - depFun(tycon1, args1, res1) - .showing(i"add function refinement $tp --> $result", capt) - else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then - tp + if !defn.isFunctionSymbol(tp.typeSymbol) && (tp.dealias ne tp) then + // This type is a function after dealiasing, so we dealias and recurse. + // See #15925. + this(tp.dealias) else - tp.derivedAppliedType(tycon1, args1 :+ res1) + val args0 = args.init + var res0 = args.last + val args1 = mapNested(args0) + val res1 = this(res0) + if isTopLevel then + depFun(tycon1, args1, res1) + .showing(i"add function refinement $tp ($tycon1, $args1, $res1) (${tp.dealias}) --> $result", capt) + else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then + tp + else + tp.derivedAppliedType(tycon1, args1 :+ res1) else tp.derivedAppliedType(tycon1, args.mapConserve(arg => this(arg))) - case tp @ RefinedType(core, rname, rinfo) if defn.isFunctionType(tp) => + case tp @ RefinedType(core, rname, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => val rinfo1 = apply(rinfo) if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) else tp @@ -248,7 +260,13 @@ extends tpd.TreeTraverser: private def expandThrowsAlias(tp: Type)(using Context) = tp match case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` - defn.FunctionOf(defn.CanThrowClass.typeRef.appliedTo(exc) :: Nil, res, isContextual = true, isErased = true) + 
defn.FunctionOf( + AnnotatedType( + defn.CanThrowClass.typeRef.appliedTo(exc), + Annotation(defn.ErasedParamAnnot, defn.CanThrowClass.span)) :: Nil, + res, + isContextual = true + ) case _ => tp private def expandThrowsAliases(using Context) = new TypeMap: @@ -311,7 +329,7 @@ extends tpd.TreeTraverser: args.last, CaptureSet.empty, currentCs ++ outerCs) tp.derivedAppliedType(tycon1, args1 :+ resType1) tp1.capturing(outerCs) - case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionType(tp) => + case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) .capturing(outerCs) case _ => @@ -331,11 +349,12 @@ extends tpd.TreeTraverser: else expandAbbreviations(tp1) /** Transform type of type tree, and remember the transformed type as the type the tree */ - private def transformTT(tree: TypeTree, boxed: Boolean)(using Context): Unit = - tree.rememberType( - if tree.isInstanceOf[InferredTypeTree] - then transformInferredType(tree.tpe, boxed) - else transformExplicitType(tree.tpe, boxed)) + private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = + if !tree.hasRememberedType then + tree.rememberType( + if tree.isInstanceOf[InferredTypeTree] && !exact + then transformInferredType(tree.tpe, boxed) + else transformExplicitType(tree.tpe, boxed)) /** Substitute parameter symbols in `from` to paramRefs in corresponding * method or poly types `to`. We use a single BiTypeMap to do everything. 
@@ -376,20 +395,32 @@ extends tpd.TreeTraverser: def traverse(tree: Tree)(using Context): Unit = tree match - case tree: DefDef if isExcluded(tree.symbol) => - return - case tree @ ValDef(_, tpt: TypeTree, _) if tree.symbol.is(Mutable) => - transformTT(tpt, boxed = true) // types of mutable variables are boxed + case tree: DefDef => + if isExcluded(tree.symbol) then + return + tree.tpt match + case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => + tree.paramss.foreach(traverse) + transformTT(tpt, boxed = false, exact = true) + traverse(tree.rhs) + //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") + case _ => + traverseChildren(tree) + case tree @ ValDef(_, tpt: TypeTree, _) => + transformTT(tpt, + boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed + exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set + ) traverse(tree.rhs) case tree @ TypeApply(fn, args) => traverse(fn) for case arg: TypeTree <- args do - transformTT(arg, boxed = true) // type arguments in type applications are boxed + transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed case _ => traverseChildren(tree) tree match case tree: TypeTree => - transformTT(tree, boxed = false) // other types are not boxed + transformTT(tree, boxed = false, exact = false) // other types are not boxed case tree: ValOrDefDef => val sym = tree.symbol @@ -460,4 +491,14 @@ extends tpd.TreeTraverser: capt.println(i"update info of ${tree.symbol} from $info to $newInfo") case _ => end traverse + + def apply(tree: Tree)(using Context): Unit = + traverse(tree)(using ctx.withProperty(Setup.IsDuringSetupKey, Some(()))) end Setup + +object Setup: + val IsDuringSetupKey = new Property.Key[Unit] + + def isDuringSetup(using Context): Boolean = + ctx.property(IsDuringSetupKey).isDefined + diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala 
b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index e8f7fd502baa..dacbd27e0f35 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -31,10 +31,12 @@ object Synthetics: * The types of these symbols are transformed in a special way without * looking at the definitions's RHS */ - def needsTransform(sym: SymDenotation)(using Context): Boolean = - isSyntheticCopyMethod(sym) - || isSyntheticCompanionMethod(sym, nme.apply, nme.unapply) - || isSyntheticCopyDefaultGetterMethod(sym) + def needsTransform(symd: SymDenotation)(using Context): Boolean = + isSyntheticCopyMethod(symd) + || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) + || isSyntheticCopyDefaultGetterMethod(symd) + || (symd.symbol eq defn.Object_eq) + || (symd.symbol eq defn.Object_ne) /** Method is excluded from regular capture checking. * Excluded are synthetic class members @@ -141,13 +143,16 @@ object Synthetics: /** Drop added capture information from the type of an `unapply` */ private def dropUnapplyCaptures(info: Type)(using Context): Type = info match case info: MethodType => - val CapturingType(oldParamInfo, _) :: Nil = info.paramInfos: @unchecked - def oldResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = oldResult(tp.resType)) - case CapturingType(tp, _) => - tp - info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + info.paramInfos match + case CapturingType(oldParamInfo, _) :: Nil => + def oldResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = oldResult(tp.resType)) + case CapturingType(tp, _) => + tp + info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + case _ => + info case info: PolyType => info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) @@ -163,7 +168,9 @@ object Synthetics: sym.copySymDenotation(info = 
addUnapplyCaptures(sym.info)) case nme.apply | nme.copy => sym.copySymDenotation(info = addCaptureDeps(sym.info)) - + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method * of a case class, transform it back to what it was before the CC phase. @@ -176,5 +183,7 @@ object Synthetics: sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) case nme.apply | nme.copy => sym.copySymDenotation(info = dropCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) end Synthetics \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index 68c900e405da..914df040fbf7 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -60,7 +60,7 @@ trait CliCommand: def defaultValue = s.default match case _: Int | _: String => s.default.toString case _ => "" - val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices ${s.legalChoices}" else "") + val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices : ${s.legalChoices}" else "") (s.name, info.filter(_.nonEmpty).mkString("\n")) end help diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 1b0fea9184d1..247fa28efbda 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -22,6 +22,11 @@ object Config { */ inline val checkConstraintsNonCyclic = false + /** Check that reverse dependencies in constraints are correct and 
complete. + * Can also be enabled using -Ycheck-constraint-deps. + */ + inline val checkConstraintDeps = false + /** Check that each constraint resulting from a subtype test * is satisfiable. Also check that a type variable instantiation * satisfies its constraints. @@ -78,13 +83,6 @@ object Config { */ inline val failOnInstantiationToNothing = false - /** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set. - * The reason to have an option as well as the present global switch is - * that the noDoubleDef checking is done in a hotspot, and we do not - * want to incur the overhead of checking an option each time. - */ - inline val checkNoDoubleBindings = true - /** Check positions for consistency after parsing */ inline val checkPositions = true @@ -184,6 +182,9 @@ object Config { /** If set, prints a trace of all symbol completions */ inline val showCompletions = false + /** If set, show variable/variable reverse dependencies when printing constraints. */ + inline val showConstraintDeps = true + /** If set, method results that are context functions are flattened by adding * the parameters of the context function results to the methods themselves. * This is an optimization that reduces closure allocations. @@ -240,7 +241,7 @@ object Config { */ inline val printCaptureSetsAsPrefix = true - /** If true, allow mappping capture set variables under -Ycc with maps that are neither + /** If true, allow mappping capture set variables under captureChecking with maps that are neither * bijective nor idempotent. We currently do now know how to do this correctly in all * cases, though. 
*/ diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 4a87f5b4a537..419ed5868cbf 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -28,6 +28,12 @@ object Feature: val symbolLiterals = deprecated("symbolLiterals") val fewerBraces = experimental("fewerBraces") val saferExceptions = experimental("saferExceptions") + val clauseInterleaving = experimental("clauseInterleaving") + val pureFunctions = experimental("pureFunctions") + val captureChecking = experimental("captureChecking") + val into = experimental("into") + + val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking) /** Is `feature` enabled by by a command-line setting? The enabling setting is * @@ -71,10 +77,34 @@ object Feature: def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) + def clauseInterleavingEnabled(using Context) = enabled(clauseInterleaving) + def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + /** Is pureFunctions enabled for this compilation unit? */ + def pureFunsEnabled(using Context) = + enabledBySetting(pureFunctions) + || ctx.compilationUnit.knowsPureFuns + || ccEnabled + + /** Is captureChecking enabled for this compilation unit? */ + def ccEnabled(using Context) = + enabledBySetting(captureChecking) + || ctx.compilationUnit.needsCaptureChecking + + /** Is pureFunctions enabled for any of the currently compiled compilation units? */ + def pureFunsEnabledSomewhere(using Context) = + enabledBySetting(pureFunctions) + || ctx.run != null && ctx.run.nn.pureFunsImportEncountered + || ccEnabledSomewhere + + /** Is captureChecking enabled for any of the currently compiled compilation units? 
*/ + def ccEnabledSomewhere(using Context) = + enabledBySetting(captureChecking) + || ctx.run != null && ctx.run.nn.ccImportEncountered + def sourceVersionSetting(using Context): SourceVersion = SourceVersion.valueOf(ctx.settings.source.value) @@ -83,7 +113,11 @@ object Feature: case Some(v) => v case none => sourceVersionSetting - def migrateTo3(using Context): Boolean = sourceVersion == `3.0-migration` + def migrateTo3(using Context): Boolean = + sourceVersion == `3.0-migration` + + def fewerBracesEnabled(using Context) = + sourceVersion.isAtLeast(`3.3`) || enabled(fewerBraces) /** If current source migrates to `version`, issue given warning message * and return `true`, otherwise return `false`. @@ -99,7 +133,7 @@ object Feature: def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = if !isExperimentalEnabled then - report.error(i"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) + report.error(em"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = if !isExperimentalEnabled then @@ -110,7 +144,7 @@ object Feature: i"${sym.owner} is marked @experimental" else i"$sym inherits @experimental" - report.error(s"$symMsg and therefore may only be used in an experimental scope.", srcPos) + report.error(em"$symMsg and therefore may only be used in an experimental scope.", srcPos) /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ def checkExperimentalSettings(using Context): Unit = @@ -121,4 +155,21 @@ object Feature: def isExperimentalEnabled(using Context): Boolean = Properties.experimental && !ctx.settings.YnoExperimental.value + /** Handle language import `import language..` if it is one + * of the global imports `pureFunctions` or `captureChecking`. 
In this case + * make the compilation unit's and current run's fields accordingly. + * @return true iff import that was handled + */ + def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = + val fullFeatureName = QualifiedName(prefix, imported.asTermName) + if fullFeatureName == pureFunctions then + ctx.compilationUnit.knowsPureFuns = true + if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true + true + else if fullFeatureName == captureChecking then + ctx.compilationUnit.needsCaptureChecking = true + if ctx.run != null then ctx.run.nn.ccImportEncountered = true + true + else + false end Feature diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala index ecb189de9bb3..63d616e1ce3d 100644 --- a/compiler/src/dotty/tools/dotc/config/Printers.scala +++ b/compiler/src/dotty/tools/dotc/config/Printers.scala @@ -32,6 +32,7 @@ object Printers { val init = noPrinter val inlining = noPrinter val interactiv = noPrinter + val macroAnnot = noPrinter val matchTypes = noPrinter val nullables = noPrinter val overload = noPrinter diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 9e34f8d726b5..32cb030a3296 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -15,9 +15,9 @@ import scala.util.chaining._ class ScalaSettings extends SettingGroup with AllScalaSettings object ScalaSettings: - // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` + // Keep synchronized with `classfileVersion` in `BackendUtils` private val minTargetVersion = 8 - private val maxTargetVersion = 19 + private val maxTargetVersion = 20 def supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) @@ -64,7 +64,6 @@ trait AllScalaSettings extends CommonScalaSettings, 
PluginSettings, VerboseSetti val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) - val YindentColons: Setting[Boolean] = BooleanSetting("-Yindent-colons", "(disabled: use -language:experimental.fewerBraces instead)") /* Decompiler settings */ val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) @@ -149,25 +148,79 @@ private sealed trait VerboseSettings: val Vprofile: Setting[Boolean] = BooleanSetting("-Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.") val VprofileSortedBy = ChoiceSetting("-Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "") val VprofileDetails = IntSetting("-Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0) + val VreplMaxPrintElements: Setting[Int] = IntSetting("-Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000) + val VreplMaxPrintCharacters: Setting[Int] = IntSetting("-Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000) /** -W "Warnings" settings */ private sealed trait WarningSettings: self: SettingGroup => + import Setting.ChoiceWithHelp + val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) + val 
WvalueDiscard: Setting[Boolean] = BooleanSetting("-Wvalue-discard", "Warn when non-Unit expression results are unused.") + val WNonUnitStatement = BooleanSetting("-Wnonunit-statement", "Warn when block statements are non-Unit expressions.") - val Wunused: Setting[List[String]] = MultiChoiceSetting( + val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( name = "-Wunused", helpArg = "warning", descr = "Enable or disable specific `unused` warnings", - choices = List("nowarn", "all"), + choices = List( + ChoiceWithHelp("nowarn", ""), + ChoiceWithHelp("all",""), + ChoiceWithHelp( + name = "imports", + description = "Warn if an import selector is not referenced.\n" + + "NOTE : overrided by -Wunused:strict-no-implicit-warn"), + ChoiceWithHelp("privates","Warn if a private member is unused"), + ChoiceWithHelp("locals","Warn if a local definition is unused"), + ChoiceWithHelp("explicits","Warn if an explicit parameter is unused"), + ChoiceWithHelp("implicits","Warn if an implicit parameter is unused"), + ChoiceWithHelp("params","Enable -Wunused:explicits,implicits"), + ChoiceWithHelp("linted","Enable -Wunused:imports,privates,locals,implicits"), + ChoiceWithHelp( + name = "strict-no-implicit-warn", + description = "Same as -Wunused:import, only for imports of explicit named members.\n" + + "NOTE : This overrides -Wunused:imports and NOT set by -Wunused:all" + ), + // ChoiceWithHelp("patvars","Warn if a variable bound in a pattern is unused"), + ChoiceWithHelp( + name = "unsafe-warn-patvars", + description = "(UNSAFE) Warn if a variable bound in a pattern is unused.\n" + + "This warning can generate false positive, as warning cannot be\n" + + "suppressed yet." 
+ ) + ), default = Nil ) object WunusedHas: + def isChoiceSet(s: String)(using Context) = Wunused.value.pipe(us => us.contains(s)) def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) def nowarn(using Context) = allOr("nowarn") + // overrided by strict-no-implicit-warn + def imports(using Context) = + (allOr("imports") || allOr("linted")) && !(strictNoImplicitWarn) + def locals(using Context) = + allOr("locals") || allOr("linted") + /** -Wunused:explicits OR -Wunused:params */ + def explicits(using Context) = + allOr("explicits") || allOr("params") + /** -Wunused:implicits OR -Wunused:params */ + def implicits(using Context) = + allOr("implicits") || allOr("params") || allOr("linted") + def params(using Context) = allOr("params") + def privates(using Context) = + allOr("privates") || allOr("linted") + def patvars(using Context) = + isChoiceSet("unsafe-warn-patvars") // not with "all" + // allOr("patvars") // todo : rename once fixed + def linted(using Context) = + allOr("linted") + def strictNoImplicitWarn(using Context) = + isChoiceSet("strict-no-implicit-warn") + val Wconf: Setting[List[String]] = MultiStringSetting( "-Wconf", "patterns", @@ -280,6 +333,7 @@ private sealed trait YSettings: val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. This is either \"always\", \"never\", or a classpath.", "always") val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") + val Yimports: Setting[List[String]] = MultiStringSetting("-Yimports", helpArg="", "Custom root imports. 
If set, none of scala.*, java.lang.*, or Predef.* will be imported unless explicitly included.") val YnoGenericSig: Setting[Boolean] = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.") val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip") @@ -307,10 +361,12 @@ private sealed trait YSettings: val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") + val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features") + val YlegacyLazyVals: Setting[Boolean] = BooleanSetting("-Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals") val YprofileEnabled: Setting[Boolean] = 
BooleanSetting("-Yprofile-enabled", "Enable profiling.") val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") @@ -326,9 +382,8 @@ private sealed trait YSettings: val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") - val Ycc: Setting[Boolean] = BooleanSetting("-Ycc", "Check captured references (warning: extremely experimental and unstable)") - val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with -Ycc, debug info for captured references") - val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with -Ycc, suppress type abbreviations") + val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") + val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") /** Area-specific debug output */ val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 277833afbd5d..34e5582e8a91 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -11,6 +11,7 @@ import annotation.tailrec import collection.mutable.ArrayBuffer import reflect.ClassTag import scala.util.{Success, Failure} +import 
dotty.tools.dotc.config.Settings.Setting.ChoiceWithHelp object Settings: @@ -69,11 +70,11 @@ object Settings: def updateIn(state: SettingsState, x: Any): SettingsState = x match case _: T => state.update(idx, x) - case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${implicitly[ClassTag[T]]}") + case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${summon[ClassTag[T]]}") def isDefaultIn(state: SettingsState): Boolean = valueIn(state) == default - def isMultivalue: Boolean = implicitly[ClassTag[T]] == ListTag + def isMultivalue: Boolean = summon[ClassTag[T]] == ListTag def legalChoices: String = choices match { @@ -106,6 +107,11 @@ object Settings: def missingArg = fail(s"missing argument for option $name", args) + def setBoolean(argValue: String, args: List[String]) = + if argValue.equalsIgnoreCase("true") || argValue.isEmpty then update(true, args) + else if argValue.equalsIgnoreCase("false") then update(false, args) + else fail(s"$argValue is not a valid choice for boolean setting $name", args) + def setString(argValue: String, args: List[String]) = choices match case Some(xs) if !xs.contains(argValue) => @@ -126,9 +132,9 @@ object Settings: catch case _: NumberFormatException => fail(s"$argValue is not an integer argument for $name", args) - def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match { + def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match { case (BooleanTag, _) => - update(true, args) + setBoolean(argRest, args) case (OptionTag, _) => update(Some(propertyClass.get.getConstructor().newInstance()), args) case (ListTag, _) => @@ -184,6 +190,19 @@ object Settings: def update(x: T)(using Context): SettingsState = setting.updateIn(ctx.settingsState, x) def isDefault(using Context): Boolean = setting.isDefaultIn(ctx.settingsState) + /** + * A choice with help description. 
+ * + * NOTE : `equals` and `toString` have special behaviors + */ + case class ChoiceWithHelp[T](name: T, description: String): + override def equals(x: Any): Boolean = x match + case s:String => s == name.toString() + case _ => false + override def toString(): String = + s"\n- $name${if description.isEmpty() then "" else s" :\n\t${description.replace("\n","\n\t")}"}" + end Setting + class SettingGroup { private val _allSettings = new ArrayBuffer[Setting[?]] @@ -265,6 +284,9 @@ object Settings: def MultiChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def MultiChoiceHelpSetting(name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = publish(Setting(name, descr, default, aliases = aliases)) @@ -290,6 +312,6 @@ object Settings: publish(Setting(name, descr, default)) def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = - publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass), aliases = aliases)) + publish(Setting(name, descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) } end Settings diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 545e2f2d9b42..4b9b1b247856 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -8,6 +8,7 @@ import util.Property enum 
SourceVersion: case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. case `3.2-migration`, `3.2` + case `3.3-migration`, `3.3` case `future-migration`, `future` val isMigrating: Boolean = toString.endsWith("-migration") @@ -18,7 +19,7 @@ enum SourceVersion: def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.2` + def defaultSourceVersion = `3.3` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index fccf43a3e834..202f3eb26e41 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -2,12 +2,13 @@ package dotty.tools package dotc package core -import Symbols._, Types._, Contexts._, Constants._ -import dotty.tools.dotc.ast.tpd, tpd.* +import Symbols._, Types._, Contexts._, Constants._, Phases.* +import ast.tpd, tpd.* import util.Spans.Span import printing.{Showable, Printer} import printing.Texts.Text -import annotation.internal.sharable + +import scala.annotation.internal.sharable object Annotations { @@ -20,6 +21,8 @@ object Annotations { def symbol(using Context): Symbol = annotClass(tree) + def hasSymbol(sym: Symbol)(using Context) = symbol == sym + def matches(cls: Symbol)(using Context): Boolean = symbol.derivesFrom(cls) def appliesToModule: Boolean = true // for now; see remark in SymDenotations @@ -85,6 +88,22 @@ object Annotations { def sameAnnotation(that: Annotation)(using Context): Boolean = symbol == that.symbol && tree.sameTree(that.tree) + def hasOneOfMetaAnnotation(metaSyms: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): Boolean = 
atPhaseNoLater(erasurePhase) { + def go(metaSyms: Set[Symbol]) = + def recTp(tp: Type): Boolean = tp.dealiasKeepAnnots match + case AnnotatedType(parent, metaAnnot) => metaSyms.exists(metaAnnot.matches) || recTp(parent) + case _ => false + def rec(tree: Tree): Boolean = methPart(tree) match + case New(tpt) => rec(tpt) + case Select(qual, _) => rec(qual) + case Annotated(arg, metaAnnot) => metaSyms.exists(metaAnnot.tpe.classSymbol.derivesFrom) || rec(arg) + case t @ Ident(_) => recTp(t.tpe) + case Typed(expr, _) => rec(expr) + case _ => false + metaSyms.exists(symbol.hasAnnotation) || rec(tree) + go(metaSyms) || orNoneOf.nonEmpty && !go(orNoneOf) + } + /** Operations for hash-consing, can be overridden */ def hash: Int = System.identityHashCode(this) def eql(that: Annotation) = this eq that @@ -127,6 +146,11 @@ object Annotations { override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] } + class DeferredSymAndTree(symFn: Context ?=> Symbol, treeFn: Context ?=> Tree) + extends LazyAnnotation: + protected var mySym: Symbol | (Context ?=> Symbol) | Null = ctx ?=> symFn(using ctx) + protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) + /** An annotation indicating the body of a right-hand side, * typically of an inline method. 
Treated specially in * pickling/unpickling and TypeTreeMaps @@ -171,40 +195,31 @@ object Annotations { def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) - def apply(cls: ClassSymbol)(using Context): Annotation = - apply(cls, Nil) - - def apply(cls: ClassSymbol, arg: Tree)(using Context): Annotation = - apply(cls, arg :: Nil) - - def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(using Context): Annotation = - apply(cls, arg1 :: arg2 :: Nil) + def apply(cls: ClassSymbol, span: Span)(using Context): Annotation = + apply(cls, Nil, span) - def apply(cls: ClassSymbol, args: List[Tree])(using Context): Annotation = - apply(cls.typeRef, args) + def apply(cls: ClassSymbol, arg: Tree, span: Span)(using Context): Annotation = + apply(cls, arg :: Nil, span) - def apply(atp: Type, arg: Tree)(using Context): Annotation = - apply(atp, arg :: Nil) + def apply(cls: ClassSymbol, args: List[Tree], span: Span)(using Context): Annotation = + apply(cls.typeRef, args, span) - def apply(atp: Type, arg1: Tree, arg2: Tree)(using Context): Annotation = - apply(atp, arg1 :: arg2 :: Nil) + def apply(atp: Type, arg: Tree, span: Span)(using Context): Annotation = + apply(atp, arg :: Nil, span) - def apply(atp: Type, args: List[Tree])(using Context): Annotation = - apply(New(atp, args)) + def apply(atp: Type, args: List[Tree], span: Span)(using Context): Annotation = + apply(New(atp, args).withSpan(span)) /** Create an annotation where the tree is computed lazily. */ - def deferred(sym: Symbol)(treeFn: Context ?=> Tree)(using Context): Annotation = + def deferred(sym: Symbol)(treeFn: Context ?=> Tree): Annotation = new LazyAnnotation { protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) protected var mySym: Symbol | (Context ?=> Symbol) | Null = sym } /** Create an annotation where the symbol and the tree are computed lazily. 
*/ - def deferredSymAndTree(symFn: Context ?=> Symbol)(treeFn: Context ?=> Tree)(using Context): Annotation = - new LazyAnnotation { - protected var mySym: Symbol | (Context ?=> Symbol) | Null = ctx ?=> symFn(using ctx) - protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) - } + def deferredSymAndTree(symFn: Context ?=> Symbol)(treeFn: Context ?=> Tree): Annotation = + DeferredSymAndTree(symFn, treeFn) /** Extractor for child annotations */ object Child { @@ -230,15 +245,15 @@ object Annotations { else None } - def makeSourceFile(path: String)(using Context): Annotation = - apply(defn.SourceFileAnnot, Literal(Constant(path))) + def makeSourceFile(path: String, span: Span)(using Context): Annotation = + apply(defn.SourceFileAnnot, Literal(Constant(path)), span) } @sharable val EmptyAnnotation = Annotation(EmptyTree) def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = { val tref = cls.typeRef - Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref)) + Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref), cls.span) } /** Extracts the type of the thrown exception from an annotation. 
diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index 4b441d512dec..a61701eee2d7 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -149,7 +149,7 @@ class CheckRealizable(using Context) { */ private def boundsRealizability(tp: Type) = { - val memberProblems = withMode(Mode.CheckBounds) { + val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { for { mbr <- tp.nonClassTypeMembers if !(mbr.info.loBound <:< mbr.info.hiBound) @@ -157,7 +157,7 @@ class CheckRealizable(using Context) { yield new HasProblemBounds(mbr.name, mbr.info) } - val refinementProblems = withMode(Mode.CheckBounds) { + val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { for { name <- refinedNames(tp) if (name.isTypeName) diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala index 07b6e71cdcc9..c634f847e510 100644 --- a/compiler/src/dotty/tools/dotc/core/Constraint.scala +++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala @@ -4,6 +4,7 @@ package core import Types._, Contexts._ import printing.Showable +import util.{SimpleIdentitySet, SimpleIdentityMap} /** Constraint over undetermined type parameters. Constraints are built * over values of the following types: @@ -70,6 +71,9 @@ abstract class Constraint extends Showable { */ def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds + /** The current bounds of type parameter `param` */ + def bounds(param: TypeParamRef)(using Context): TypeBounds + /** A new constraint which is derived from this constraint by adding * entries for all type parameters of `poly`. * @param tvars A list of type variables associated with the params, @@ -87,6 +91,8 @@ abstract class Constraint extends Showable { * - Another type, indicating a solution for the parameter * * @pre `this contains param`. 
+ * @pre `tp` does not contain top-level references to `param` + * (see `validBoundsFor`) */ def updateEntry(param: TypeParamRef, tp: Type)(using Context): This @@ -128,7 +134,7 @@ abstract class Constraint extends Showable { /** Is `tv` marked as hard in the constraint? */ def isHard(tv: TypeVar): Boolean - + /** The same as this constraint, but with `tv` marked as hard. */ def withHard(tv: TypeVar)(using Context): This @@ -165,15 +171,49 @@ abstract class Constraint extends Showable { */ def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean - /** Check that no constrained parameter contains itself as a bound */ - def checkNonCyclic()(using Context): this.type - /** Does `param` occur at the toplevel in `tp` ? * Toplevel means: the type itself or a factor in some * combination of `&` or `|` types. */ def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean + /** Sanitize `bound` to make it either a valid upper or lower bound for + * `param` depending on `isUpper`. + * + * Toplevel references to `param` are replaced by `Any` if `isUpper` is true + * and `Nothing` otherwise. + * + * @see `occursAtToplevel` for a definition of "toplevel" + * @see `validBoundsFor` to sanitize both the lower and upper bound at once. + */ + def validBoundFor(param: TypeParamRef, bound: Type, isUpper: Boolean)(using Context): Type + + /** Sanitize `bounds` to make them valid constraints for `param`. + * + * @see `validBoundFor` for details. + */ + def validBoundsFor(param: TypeParamRef, bounds: TypeBounds)(using Context): Type + + /** A string that shows the reverse dependencies maintained by this constraint + * (coDeps and contraDeps for OrderingConstraints). + */ + def depsToString(using Context): String + + /** Does the constraint restricted to variables outside `except` depend on `tv` + * in the given direction `co`?
+ * @param `co` If true, test whether the constraint would change if the variable is made larger + otherwise, test whether the constraint would change if the variable is made smaller. + */ + def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean + + /** Depending on Config settings: + * - Under `checkConstraintsNonCyclic`, check that no constrained + * parameter contains itself as a bound. + * - Under `checkConstraintDeps`, check that reverse dependencies in + * constraints are correct and complete. + */ + def checkWellFormed()(using Context): this.type + /** Check that constraint only refers to TypeParamRefs bound by itself */ def checkClosed()(using Context): Unit diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 1dfa04822766..9ffe2bda73cb 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -58,6 +58,12 @@ trait ConstraintHandling { */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + /** Used for match type reduction: If false, we don't recognize an abstract type + * to be a subtype of any of its base classes. This is in place only at the + * toplevel; it is turned on again when we add parts of the scrutinee to the constraint.
+ */ + protected var canWidenAbstract: Boolean = true + protected var myNecessaryConstraintsOnly = false /** When collecting the constraints needed for a particular subtyping * judgment to be true, we sometimes need to approximate the constraint @@ -146,8 +152,8 @@ trait ConstraintHandling { return param LevelAvoidMap(0, maxLevel)(param) match case freshVar: TypeVar => freshVar.origin - case _ => throw new TypeError( - i"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") + case _ => throw TypeError( + em"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) @@ -251,7 +257,7 @@ trait ConstraintHandling { end LevelAvoidMap /** Approximate `rawBound` if needed to make it a legal bound of `param` by - * avoiding wildcards and types with a level strictly greater than its + * avoiding cycles, wildcards and types with a level strictly greater than its * `nestingLevel`. * * Note that level-checking must be performed here and cannot be delayed @@ -277,7 +283,7 @@ trait ConstraintHandling { // This is necessary for i8900-unflip.scala to typecheck. 
val v = if necessaryConstraintsOnly then -this.variance else this.variance atVariance(v)(super.legalVar(tp)) - approx(rawBound) + constraint.validBoundFor(param, approx(rawBound), isUpper) end legalBound protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = @@ -407,8 +413,10 @@ trait ConstraintHandling { constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) - val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) - var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) + val boundKept = constraint.validBoundsFor(pKept, + constraint.nonParamBounds( pKept).substParam(pRemoved, pKept).bounds) + var boundRemoved = constraint.validBoundsFor(pKept, + constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept).bounds) if level1 != level2 then boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) @@ -550,6 +558,13 @@ trait ConstraintHandling { inst end approximation + private def isTransparent(tp: Type, traitOnly: Boolean)(using Context): Boolean = tp match + case AndType(tp1, tp2) => + isTransparent(tp1, traitOnly) && isTransparent(tp2, traitOnly) + case _ => + val cls = tp.underlyingClassRef(refinementOK = false).typeSymbol + cls.isTransparentClass && (!traitOnly || cls.is(Trait)) + /** If `tp` is an intersection such that some operands are transparent trait instances * and others are not, replace as many transparent trait instances as possible with Any * as long as the result is still a subtype of `bound`. 
But fall back to the @@ -562,18 +577,17 @@ trait ConstraintHandling { var dropped: List[Type] = List() // the types dropped so far, last one on top def dropOneTransparentTrait(tp: Type): Type = - val tpd = tp.dealias - if tpd.typeSymbol.isTransparentTrait && !tpd.isLambdaSub && !kept.contains(tpd) then - dropped = tpd :: dropped + if isTransparent(tp, traitOnly = true) && !kept.contains(tp) then + dropped = tp :: dropped defn.AnyType - else tpd match + else tp match case AndType(tp1, tp2) => val tp1w = dropOneTransparentTrait(tp1) if tp1w ne tp1 then tp1w & tp2 else val tp2w = dropOneTransparentTrait(tp2) if tp2w ne tp2 then tp1 & tp2w - else tpd + else tp case _ => tp @@ -612,8 +626,9 @@ trait ConstraintHandling { /** Widen inferred type `inst` with upper `bound`, according to the following rules: * 1. If `inst` is a singleton type, or a union containing some singleton types, - * widen (all) the singleton type(s), provided the result is a subtype of `bound`. - * (i.e. `inst.widenSingletons <:< bound` succeeds with satisfiable constraint) + * widen (all) the singleton type(s), provided the result is a subtype of `bound` + * (i.e. `inst.widenSingletons <:< bound` succeeds with satisfiable constraint) and + * is not transparent according to `isTransparent`. * 2a. If `inst` is a union type and `widenUnions` is true, approximate the union type * from above by an intersection of all common base types, provided the result * is a subtype of `bound`. 
@@ -635,7 +650,7 @@ trait ConstraintHandling { def widenOr(tp: Type) = if widenUnions then val tpw = tp.widenUnion - if (tpw ne tp) && (tpw <:< bound) then tpw else tp + if (tpw ne tp) && !isTransparent(tpw, traitOnly = false) && (tpw <:< bound) then tpw else tp else tp.hardenUnions def widenSingle(tp: Type) = @@ -648,7 +663,12 @@ trait ConstraintHandling { val wideInst = if isSingleton(bound) then inst - else dropTransparentTraits(widenIrreducible(widenOr(widenSingle(inst))), bound) + else + val widenedFromSingle = widenSingle(inst) + val widenedFromUnion = widenOr(widenedFromSingle) + val widened = dropTransparentTraits(widenedFromUnion, bound) + widenIrreducible(widened) + wideInst match case wideInst: TypeRef if wideInst.symbol.is(Module) => TermRef(wideInst.prefix, wideInst.symbol.sourceModule) @@ -729,16 +749,7 @@ trait ConstraintHandling { } /** The current bounds of type parameter `param` */ - def bounds(param: TypeParamRef)(using Context): TypeBounds = { - val e = constraint.entry(param) - if (e.exists) e.bounds - else { - // TODO: should we change the type of paramInfos to nullable? - val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos - if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala - else TypeBounds.empty - } - } + def bounds(param: TypeParamRef)(using Context): TypeBounds = constraint.bounds(param) /** Add type lambda `tl`, possibly with type variables `tvars`, to current constraint * and propagate all bounds. 
@@ -839,13 +850,17 @@ trait ConstraintHandling { //checkPropagated(s"adding $description")(true) // DEBUG in case following fails checkPropagated(s"added $description") { addConstraintInvocations += 1 + val saved = canWidenAbstract + canWidenAbstract = true try bound match case bound: TypeParamRef if constraint contains bound => addParamBound(bound) case _ => val pbound = avoidLambdaParams(bound) kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) - finally addConstraintInvocations -= 1 + finally + canWidenAbstract = saved + addConstraintInvocations -= 1 } end addConstraint diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 919598c41d6e..e0e43169820a 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -28,6 +28,7 @@ import printing._ import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} import classfile.ReusableDataReader import StdNames.nme +import compiletime.uninitialized import scala.annotation.internal.sharable @@ -123,7 +124,9 @@ object Contexts { */ abstract class Context(val base: ContextBase) { thiscontext => - given Context = this + protected given Context = this + + def outer: Context /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */ def outersIterator: Iterator[Context] = new Iterator[Context] { @@ -132,65 +135,21 @@ object Contexts { def next = { val c = current; current = current.outer; c } } - /** The outer context */ - private var _outer: Context = _ - protected def outer_=(outer: Context): Unit = _outer = outer - final def outer: Context = _outer - - /** The current context */ - private var _period: Period = _ - protected def period_=(period: Period): Unit = { - assert(period.firstPhaseId == period.lastPhaseId, period) - _period = period - } - final def period: Period = _period - - /** The scope nesting level */ - private var _mode: Mode = _ - 
protected def mode_=(mode: Mode): Unit = _mode = mode - final def mode: Mode = _mode - - /** The current owner symbol */ - private var _owner: Symbol = _ - protected def owner_=(owner: Symbol): Unit = _owner = owner - final def owner: Symbol = _owner - - /** The current tree */ - private var _tree: Tree[? >: Untyped]= _ - protected def tree_=(tree: Tree[? >: Untyped]): Unit = _tree = tree - final def tree: Tree[? >: Untyped] = _tree - - /** The current scope */ - private var _scope: Scope = _ - protected def scope_=(scope: Scope): Unit = _scope = scope - final def scope: Scope = _scope - - /** The current typerstate */ - private var _typerState: TyperState = _ - protected def typerState_=(typerState: TyperState): Unit = _typerState = typerState - final def typerState: TyperState = _typerState - - /** The current bounds in force for type parameters appearing in a GADT */ - private var _gadt: GadtConstraint = _ - protected def gadt_=(gadt: GadtConstraint): Unit = _gadt = gadt - final def gadt: GadtConstraint = _gadt - - /** The history of implicit searches that are currently active */ - private var _searchHistory: SearchHistory = _ - protected def searchHistory_= (searchHistory: SearchHistory): Unit = _searchHistory = searchHistory - final def searchHistory: SearchHistory = _searchHistory - - /** The current source file */ - private var _source: SourceFile = _ - protected def source_=(source: SourceFile): Unit = _source = source - final def source: SourceFile = _source + def period: Period + def mode: Mode + def owner: Symbol + def tree: Tree[?] + def scope: Scope + def typerState: TyperState + def gadt: GadtConstraint = gadtState.gadt + def gadtState: GadtState + def searchHistory: SearchHistory + def source: SourceFile /** A map in which more contextual properties can be stored * Typically used for attributes that are read and written only in special situations. 
*/ - private var _moreProperties: Map[Key[Any], Any] = _ - protected def moreProperties_=(moreProperties: Map[Key[Any], Any]): Unit = _moreProperties = moreProperties - final def moreProperties: Map[Key[Any], Any] = _moreProperties + def moreProperties: Map[Key[Any], Any] def property[T](key: Key[T]): Option[T] = moreProperties.get(key).asInstanceOf[Option[T]] @@ -200,9 +159,7 @@ object Contexts { * Access to store entries is much faster than access to properties, and only * slightly slower than a normal field access would be. */ - private var _store: Store = _ - protected def store_=(store: Store): Unit = _store = store - final def store: Store = _store + def store: Store /** The compiler callback implementation, or null if no callback will be called. */ def compilerCallback: CompilerCallback = store(compilerCallbackLoc) @@ -240,7 +197,7 @@ object Contexts { def typeAssigner: TypeAssigner = store(typeAssignerLoc) /** The new implicit references that are introduced by this scope */ - protected var implicitsCache: ContextualImplicits | Null = null + private var implicitsCache: ContextualImplicits | Null = null def implicits: ContextualImplicits = { if (implicitsCache == null) implicitsCache = { @@ -299,13 +256,12 @@ object Contexts { file catch case ex: InvalidPathException => - report.error(s"invalid file path: ${ex.getMessage}") + report.error(em"invalid file path: ${ex.getMessage}") NoAbstractFile /** AbstractFile with given path, memoized */ def getFile(name: String): AbstractFile = getFile(name.toTermName) - private var related: SimpleIdentityMap[Phase | SourceFile, Context] | Null = null private def lookup(key: Phase | SourceFile): Context | Null = @@ -356,7 +312,7 @@ object Contexts { /** If -Ydebug is on, the top of the stack trace where this context * was created, otherwise `null`. 
*/ - private var creationTrace: Array[StackTraceElement] = _ + private var creationTrace: Array[StackTraceElement] = uninitialized private def setCreationTrace() = creationTrace = (new Throwable).getStackTrace().take(20) @@ -455,7 +411,7 @@ object Contexts { val constrCtx = outersIterator.dropWhile(_.outer.owner == owner).next() superOrThisCallContext(owner, constrCtx.scope) .setTyperState(typerState) - .setGadt(gadt) + .setGadtState(gadtState) .fresh .setScope(this.scope) } @@ -469,7 +425,7 @@ object Contexts { } /** The context of expression `expr` seen as a member of a statement sequence */ - def exprContext(stat: Tree[? >: Untyped], exprOwner: Symbol): Context = + def exprContext(stat: Tree[?], exprOwner: Symbol): Context = if (exprOwner == this.owner) this else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext else fresh.setOwner(exprOwner) @@ -488,39 +444,20 @@ object Contexts { def useColors: Boolean = base.settings.color.value == "always" - /** Is the explicit nulls option set? */ - def explicitNulls: Boolean = base.settings.YexplicitNulls.value + def withColors: FreshContext = + fresh.setSetting(ctx.settings.color, "always") - /** Initialize all context fields, except typerState, which has to be set separately - * @param outer The outer context - * @param origin The context from which fields are copied - */ - private[Contexts] def init(outer: Context, origin: Context): this.type = { - _outer = outer - _period = origin.period - _mode = origin.mode - _owner = origin.owner - _tree = origin.tree - _scope = origin.scope - _gadt = origin.gadt - _searchHistory = origin.searchHistory - _source = origin.source - _moreProperties = origin.moreProperties - _store = origin.store - this - } + def withoutColors: FreshContext = + fresh.setSetting(ctx.settings.color, "never") - def reuseIn(outer: Context): this.type = - implicitsCache = null - related = null - init(outer, outer) + /** Is the explicit nulls option set? 
*/ + def explicitNulls: Boolean = base.settings.YexplicitNulls.value /** A fresh clone of this context embedded in this context. */ def fresh: FreshContext = freshOver(this) /** A fresh clone of this context embedded in the specified `outer` context. */ def freshOver(outer: Context): FreshContext = - util.Stats.record("Context.fresh") FreshContext(base).init(outer, this).setTyperState(this.typerState) final def withOwner(owner: Symbol): Context = @@ -565,6 +502,15 @@ object Contexts { def uniques: util.WeakHashSet[Type] = base.uniques def initialize()(using Context): Unit = base.initialize() + + protected def resetCaches(): Unit = + implicitsCache = null + related = null + + /** Reuse this context as a fresh context nested inside `outer` + * But keep the typerstate, this one has to be set explicitly if needed. + */ + def reuseIn(outer: Context): this.type } /** A condensed context provides only a small memory footprint over @@ -579,55 +525,138 @@ object Contexts { * of its attributes using the with... methods. */ class FreshContext(base: ContextBase) extends Context(base) { + util.Stats.record("Context.fresh") + + private var _outer: Context = uninitialized + def outer: Context = _outer + + private var _period: Period = uninitialized + final def period: Period = _period + + private var _mode: Mode = uninitialized + final def mode: Mode = _mode + + private var _owner: Symbol = uninitialized + final def owner: Symbol = _owner + + private var _tree: Tree[?]= _ + final def tree: Tree[?] 
= _tree + + private var _scope: Scope = uninitialized + final def scope: Scope = _scope + + private var _typerState: TyperState = uninitialized + final def typerState: TyperState = _typerState + + private var _gadtState: GadtState = uninitialized + final def gadtState: GadtState = _gadtState + + private var _searchHistory: SearchHistory = uninitialized + final def searchHistory: SearchHistory = _searchHistory + + private var _source: SourceFile = uninitialized + final def source: SourceFile = _source + + private var _moreProperties: Map[Key[Any], Any] = uninitialized + final def moreProperties: Map[Key[Any], Any] = _moreProperties + + private var _store: Store = uninitialized + final def store: Store = _store + + /** Initialize all context fields, except typerState, which has to be set separately + * @param outer The outer context + * @param origin The context from which fields are copied + */ + private[Contexts] def init(outer: Context, origin: Context): this.type = { + _outer = outer + _period = origin.period + _mode = origin.mode + _owner = origin.owner + _tree = origin.tree + _scope = origin.scope + _gadtState = origin.gadtState + _searchHistory = origin.searchHistory + _source = origin.source + _moreProperties = origin.moreProperties + _store = origin.store + this + } + + def reuseIn(outer: Context): this.type = + resetCaches() + init(outer, outer) + def setPeriod(period: Period): this.type = util.Stats.record("Context.setPeriod") - this.period = period + //assert(period.firstPhaseId == period.lastPhaseId, period) + this._period = period this + def setMode(mode: Mode): this.type = util.Stats.record("Context.setMode") - this.mode = mode + this._mode = mode this + def setOwner(owner: Symbol): this.type = util.Stats.record("Context.setOwner") assert(owner != NoSymbol) - this.owner = owner + this._owner = owner this - def setTree(tree: Tree[? 
>: Untyped]): this.type = + + def setTree(tree: Tree[?]): this.type = util.Stats.record("Context.setTree") - this.tree = tree + this._tree = tree + this + + def setScope(scope: Scope): this.type = + this._scope = scope this - def setScope(scope: Scope): this.type = { this.scope = scope; this } + def setNewScope: this.type = util.Stats.record("Context.setScope") - this.scope = newScope + this._scope = newScope this - def setTyperState(typerState: TyperState): this.type = { this.typerState = typerState; this } - def setNewTyperState(): this.type = setTyperState(typerState.fresh(committable = true)) - def setExploreTyperState(): this.type = setTyperState(typerState.fresh(committable = false)) - def setReporter(reporter: Reporter): this.type = setTyperState(typerState.fresh().setReporter(reporter)) - def setTyper(typer: Typer): this.type = { this.scope = typer.scope; setTypeAssigner(typer) } - def setGadt(gadt: GadtConstraint): this.type = - util.Stats.record("Context.setGadt") - this.gadt = gadt + + def setTyperState(typerState: TyperState): this.type = + this._typerState = typerState + this + def setNewTyperState(): this.type = + setTyperState(typerState.fresh(committable = true)) + def setExploreTyperState(): this.type = + setTyperState(typerState.fresh(committable = false)) + def setReporter(reporter: Reporter): this.type = + setTyperState(typerState.fresh().setReporter(reporter)) + + def setTyper(typer: Typer): this.type = + this._scope = typer.scope + setTypeAssigner(typer) + + def setGadtState(gadtState: GadtState): this.type = + util.Stats.record("Context.setGadtState") + this._gadtState = gadtState this - def setFreshGADTBounds: this.type = setGadt(gadt.fresh) + def setFreshGADTBounds: this.type = + setGadtState(gadtState.fresh) + def setSearchHistory(searchHistory: SearchHistory): this.type = util.Stats.record("Context.setSearchHistory") - this.searchHistory = searchHistory + this._searchHistory = searchHistory this + def setSource(source: SourceFile): 
this.type = util.Stats.record("Context.setSource") - this.source = source + this._source = source this + private def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = util.Stats.record("Context.setMoreProperties") - this.moreProperties = moreProperties + this._moreProperties = moreProperties this + private def setStore(store: Store): this.type = util.Stats.record("Context.setStore") - this.store = store + this._store = store this - def setImplicits(implicits: ContextualImplicits): this.type = { this.implicitsCache = implicits; this } def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { setSource(compilationUnit.source) @@ -681,6 +710,28 @@ object Contexts { def setDebug: this.type = setSetting(base.settings.Ydebug, true) } + object FreshContext: + /** Defines an initial context with given context base and possible settings. */ + def initial(base: ContextBase, settingsGroup: SettingGroup): Context = + val c = new FreshContext(base) + c._outer = NoContext + c._period = InitialPeriod + c._mode = Mode.None + c._typerState = TyperState.initialState() + c._owner = NoSymbol + c._tree = untpd.EmptyTree + c._moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) + c._scope = EmptyScope + c._source = NoSource + c._store = initialStore + .updated(settingsStateLoc, settingsGroup.defaultState) + .updated(notNullInfosLoc, Nil) + .updated(compilationUnitLoc, NoCompilationUnit) + c._searchHistory = new SearchRoot + c._gadtState = GadtState(GadtConstraint.empty) + c + end FreshContext + given ops: AnyRef with extension (c: Context) def addNotNullInfo(info: NotNullInfo) = @@ -710,56 +761,40 @@ object Contexts { final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode) } - private def exploreCtx(using Context): FreshContext = - util.Stats.record("explore") - val base = ctx.base - import base._ - val nestedCtx = - if exploresInUse < exploreContexts.size then - exploreContexts(exploresInUse).reuseIn(ctx) - else - val ts = 
TyperState() - .setReporter(ExploringReporter()) - .setCommittable(false) - val c = FreshContext(ctx.base).init(ctx, ctx).setTyperState(ts) - exploreContexts += c - c - exploresInUse += 1 - val nestedTS = nestedCtx.typerState - nestedTS.init(ctx.typerState, ctx.typerState.constraint) - nestedCtx - - private def wrapUpExplore(ectx: Context) = - ectx.reporter.asInstanceOf[ExploringReporter].reset() - ectx.base.exploresInUse -= 1 - + /** Run `op` with a pool-allocated context that has an ExploreTyperState. */ inline def explore[T](inline op: Context ?=> T)(using Context): T = - val ectx = exploreCtx - try op(using ectx) finally wrapUpExplore(ectx) + exploreInFreshCtx(op) + /** Run `op` with a pool-allocated FreshContext that has an ExploreTyperState. */ inline def exploreInFreshCtx[T](inline op: FreshContext ?=> T)(using Context): T = - val ectx = exploreCtx - try op(using ectx) finally wrapUpExplore(ectx) - - private def changeOwnerCtx(owner: Symbol)(using Context): Context = - val base = ctx.base - import base._ - val nestedCtx = - if changeOwnersInUse < changeOwnerContexts.size then - changeOwnerContexts(changeOwnersInUse).reuseIn(ctx) - else - val c = FreshContext(ctx.base).init(ctx, ctx) - changeOwnerContexts += c - c - changeOwnersInUse += 1 - nestedCtx.setOwner(owner).setTyperState(ctx.typerState) - - /** Run `op` in current context, with a mode is temporarily set as specified. + val pool = ctx.base.exploreContextPool + val nestedCtx = pool.next() + try op(using nestedCtx) + finally + nestedCtx.typerState.reporter.asInstanceOf[ExploringReporter].reset() + pool.free() + + /** Run `op` with a pool-allocated context that has a fresh typer state. + * Commit the typer state if `commit` applied to `op`'s result returns true.
*/ + inline def withFreshTyperState[T](inline op: Context ?=> T, inline commit: T => Context ?=> Boolean)(using Context): T = + val pool = ctx.base.freshTSContextPool + val nestedCtx = pool.next() + try + val result = op(using nestedCtx) + if commit(result)(using nestedCtx) then + nestedCtx.typerState.commit() + nestedCtx.typerState.setCommittable(true) + result + finally + pool.free() + + /** Run `op` with a pool-allocated context that has the given `owner`. */ inline def runWithOwner[T](owner: Symbol)(inline op: Context ?=> T)(using Context): T = if Config.reuseOwnerContexts then - try op(using changeOwnerCtx(owner)) - finally ctx.base.changeOwnersInUse -= 1 + val pool = ctx.base.generalContextPool + try op(using pool.next().setOwner(owner).setTyperState(ctx.typerState)) + finally pool.free() else op(using ctx.fresh.setOwner(owner)) @@ -796,30 +831,9 @@ object Contexts { finally ctx.base.comparersInUse = saved end comparing - /** A class defining the initial context with given context base - * and set of possible settings. 
- */ - private class InitialContext(base: ContextBase, settingsGroup: SettingGroup) extends FreshContext(base) { - outer = NoContext - period = InitialPeriod - mode = Mode.None - typerState = TyperState.initialState() - owner = NoSymbol - tree = untpd.EmptyTree - moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) - scope = EmptyScope - source = NoSource - store = initialStore - .updated(settingsStateLoc, settingsGroup.defaultState) - .updated(notNullInfosLoc, Nil) - .updated(compilationUnitLoc, NoCompilationUnit) - searchHistory = new SearchRoot - gadt = EmptyGadtConstraint - } - - @sharable object NoContext extends Context((null: ContextBase | Null).uncheckedNN) { - source = NoSource + @sharable val NoContext: Context = new FreshContext((null: ContextBase | Null).uncheckedNN) { override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null, false)(this: @unchecked) + setSource(NoSource) } /** A context base defines state and associated methods that exist once per @@ -833,10 +847,10 @@ object Contexts { val settings: ScalaSettings = new ScalaSettings /** The initial context */ - val initialCtx: Context = new InitialContext(this, settings) + val initialCtx: Context = FreshContext.initial(this: @unchecked, settings) /** The platform, initialized by `initPlatform()`. 
*/ - private var _platform: Platform | Null = _ + private var _platform: Platform | Null = uninitialized /** The platform */ def platform: Platform = { @@ -872,6 +886,47 @@ object Contexts { allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) } + class ContextPool: + protected def fresh()(using Context): FreshContext = + FreshContext(ctx.base).init(ctx, ctx) + + private var inUse: Int = 0 + private var pool = new mutable.ArrayBuffer[FreshContext] + + def next()(using Context): FreshContext = + val base = ctx.base + import base._ + val nestedCtx = + if inUse < pool.size then + pool(inUse).reuseIn(ctx) + else + val c = fresh() + pool += c + c + inUse += 1 + nestedCtx + + final def free(): Unit = + inUse -= 1 + end ContextPool + + class TSContextPool extends ContextPool: + override def next()(using Context) = + val nextCtx = super.next() + nextCtx.typerState.init(ctx.typerState, ctx.typerState.constraint) + nextCtx + + class FreshTSContextPool extends TSContextPool: + override protected def fresh()(using Context) = + super.fresh().setTyperState(ctx.typerState.fresh(committable = true)) + + class ExploreContextPool extends TSContextPool: + override protected def fresh()(using Context) = + val ts = TyperState() + .setReporter(ExploringReporter()) + .setCommittable(false) + super.fresh().setTyperState(ts) + /** The essential mutable state of a context base, collected into a common class */ class ContextState { // Symbols state @@ -922,22 +977,27 @@ object Contexts { // Phases state - private[core] var phasesPlan: List[List[Phase]] = _ + private[core] var phasesPlan: List[List[Phase]] = uninitialized /** Phases by id */ - private[dotc] var phases: Array[Phase] = _ + private[dotc] var phases: Array[Phase] = uninitialized /** Phases with consecutive Transforms grouped into a single phase, Empty array if fusion is disabled */ private[core] var fusedPhases: Array[Phase] = Array.empty[Phase] /** Next denotation transformer id */ - private[core] var 
nextDenotTransformerId: Array[Int] = _ + private[core] var nextDenotTransformerId: Array[Int] = uninitialized - private[core] var denotTransformers: Array[DenotTransformer] = _ + private[core] var denotTransformers: Array[DenotTransformer] = uninitialized /** Flag to suppress inlining, set after overflow */ private[dotc] var stopInlining: Boolean = false + /** Cached -Yno-double-bindings setting. This is accessed from `setDenot`, which + * is fairly hot, so we don't want to lookup the setting each time it is called. + */ + private[dotc] var checkNoDoubleBindings = false + /** A variable that records that some error was reported in a globally committable context. * The error will not necessarlily be emitted, since it could still be that * the enclosing context will be aborted. The variable is used as a smoke test @@ -954,11 +1014,9 @@ object Contexts { protected[dotc] val indentTab: String = " " - private[Contexts] val exploreContexts = new mutable.ArrayBuffer[FreshContext] - private[Contexts] var exploresInUse: Int = 0 - - private[Contexts] val changeOwnerContexts = new mutable.ArrayBuffer[FreshContext] - private[Contexts] var changeOwnersInUse: Int = 0 + val exploreContextPool = ExploreContextPool() + val freshTSContextPool = FreshTSContextPool() + val generalContextPool = ContextPool() private[Contexts] val comparers = new mutable.ArrayBuffer[TypeComparer] private[Contexts] var comparersInUse: Int = 0 @@ -967,7 +1025,7 @@ object Contexts { private[core] val reusableDataReader = ReusableInstance(new ReusableDataReader()) - private[dotc] var wConfCache: (List[String], WConf) = _ + private[dotc] var wConfCache: (List[String], WConf) = uninitialized def sharedCharArray(len: Int): Array[Char] = while len > charArray.length do diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 59440d1cb965..4ef0dbc9a43b 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ 
b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -9,8 +9,9 @@ import scala.util.control.NonFatal import Contexts._, Names._, Phases._, Symbols._ import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ import transform.MegaPhase +import reporting.{Message, NoExplanation} -/** This object provides useful implicit decorators for types defined elsewhere */ +/** This object provides useful extension methods for types defined elsewhere */ object Decorators { /** Extension methods for toType/TermName methods on PreNames. @@ -57,6 +58,12 @@ object Decorators { padding + s.replace("\n", "\n" + padding) end extension + /** Convert lazy string to message. To be used with caution, since no message-defined + * formatting will be done on the string. + */ + extension (str: => String) + def toMessage: Message = NoExplanation(str)(using NoContext) + /** Implements a findSymbol method on iterators of Symbols that * works like find but avoids Option, replacing None with NoSymbol. */ @@ -74,7 +81,7 @@ object Decorators { /** Implements filterConserve, zipWithConserve methods * on lists that avoid duplication of list nodes where feasible. */ - implicit class ListDecorator[T](val xs: List[T]) extends AnyVal { + extension [T](xs: List[T]) final def mapconserve[U](f: T => U): List[U] = { @tailrec @@ -203,11 +210,18 @@ object Decorators { } /** Union on lists seen as sets */ - def | (ys: List[T]): List[T] = xs ::: (ys filterNot (xs contains _)) + def setUnion (ys: List[T]): List[T] = xs ::: ys.filterNot(xs contains _) - /** Intersection on lists seen as sets */ - def & (ys: List[T]): List[T] = xs filter (ys contains _) - } + /** Reduce left with `op` as long as list `xs` is not longer than `seqLimit`. + * Otherwise, split list in two halves, reduce each, and combine with `op`.
+ */ + def reduceBalanced(op: (T, T) => T, seqLimit: Int = 100): T = + val len = xs.length + if len > seqLimit then + val (leading, trailing) = xs.splitAt(len / 2) + op(leading.reduceBalanced(op, seqLimit), trailing.reduceBalanced(op, seqLimit)) + else + xs.reduceLeft(op) extension [T, U](xss: List[List[T]]) def nestedMap(f: T => U): List[List[U]] = xss match @@ -265,17 +279,19 @@ object Decorators { catch case ex: CyclicReference => "... (caught cyclic reference) ..." case NonFatal(ex) - if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => - val msg = ex match { case te: TypeError => te.toMessage case _ => ex.getMessage } - s"[cannot display due to $msg, raw string = $x]" + if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => + s"... (cannot display due to ${ex.className} ${ex.getMessage}) ..." case _ => String.valueOf(x).nn + /** Returns the simple class name of `x`. */ + def className: String = x.getClass.getSimpleName.nn + extension [T](x: T) def assertingErrorsReported(using Context): T = { assert(ctx.reporter.errorsReported) x } - def assertingErrorsReported(msg: => String)(using Context): T = { + def assertingErrorsReported(msg: Message)(using Context): T = { assert(ctx.reporter.errorsReported, msg) x } @@ -285,21 +301,16 @@ object Decorators { if (xs.head eq x1) && (xs.tail eq xs1) then xs else x1 :: xs1 extension (sc: StringContext) + /** General purpose string formatting */ def i(args: Shown*)(using Context): String = new StringFormatter(sc).assemble(args) - /** Formatting for error messages: Like `i` but suppress follow-on - * error messages after the first one if some of their arguments are "non-sensical". - */ - def em(args: Shown*)(using Context): String = - forErrorMessages(new StringFormatter(sc).assemble(args)) - - /** Formatting with added explanations: Like `em`, but add explanations to - * give more info about type variables and to disambiguate where needed. 
+ /** Interpolator yielding an error message, which undergoes + * the formatting defined in Message. */ - def ex(args: Shown*)(using Context): String = - explained(new StringFormatter(sc).assemble(args)) + def em(args: Shown*)(using Context): NoExplanation = + NoExplanation(i(args*)) extension [T <: AnyRef](arr: Array[T]) def binarySearch(x: T | Null): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object | Null]], x) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 83d945352321..148b314220a8 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -14,6 +14,7 @@ import typer.ImportInfo.RootRef import Comments.CommentsContext import Comments.Comment import util.Spans.NoSpan +import config.Feature import Symbols.requiredModuleRef import cc.{CapturingType, CaptureSet, EventuallyCapturingType} @@ -85,7 +86,7 @@ class Definitions { newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered } - /** The trait FunctionN, ContextFunctionN, ErasedFunctionN or ErasedContextFunction, for some N + /** The trait FunctionN and ContextFunctionN for some N * @param name The name of the trait to be created * * FunctionN traits follow this template: @@ -103,21 +104,6 @@ class Definitions { * trait ContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { * def apply(using $x0: T0, ..., $x{N_1}: T{N-1}): R * } - * - * ErasedFunctionN traits follow this template: - * - * trait ErasedFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedContextFunctionN traits follow this template: - * - * trait ErasedContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(using erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedFunctionN and ErasedContextFunctionN erase to Function0. 
- * * ImpureXYZFunctionN follow this template: * * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {*} XYZFunctionN[T0,...,T{N-1}, R] @@ -148,8 +134,7 @@ class Definitions { val resParamRef = enterTypeParam(cls, paramNamePrefix ++ "R", Covariant, decls).typeRef val methodType = MethodType.companion( isContextual = name.isContextFunction, - isImplicit = false, - isErased = name.isErasedFunction) + isImplicit = false) decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred)) denot.info = ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) @@ -469,7 +454,6 @@ class Definitions { @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) - @tu lazy val captureRoot: TermSymbol = enterPermanentSymbol(nme.CAPTURE_ROOT, AnyType).asTerm /** Method representing a throw */ @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, @@ -518,8 +502,8 @@ class Definitions { def staticsMethod(name: PreName): TermSymbol = ScalaStaticsModule.requiredMethod(name) @tu lazy val DottyArraysModule: Symbol = requiredModule("scala.runtime.Arrays") - def newGenericArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") - def newArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newArray") + @tu lazy val newGenericArrayMethod: TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") + @tu lazy val newArrayMethod: TermSymbol = DottyArraysModule.requiredMethod("newArray") def getWrapVarargsArrayModule: Symbol = ScalaRuntimeModule @@ -530,9 +514,12 @@ class Definitions { }) @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") + def ListType: TypeRef = ListClass.typeRef @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") @tu lazy val NilModule: Symbol = 
requiredModule("scala.collection.immutable.Nil") + def NilType: TermRef = NilModule.termRef @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") + def ConsType: TypeRef = ConsClass.typeRef @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") @tu lazy val SingletonClass: ClassSymbol = @@ -644,6 +631,8 @@ class Definitions { @tu lazy val RepeatedParamClass: ClassSymbol = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType)) + @tu lazy val IntoType: TypeSymbol = enterAliasType(tpnme.INTO, HKTypeLambda(TypeBounds.empty :: Nil)(_.paramRefs(0))) + // fundamental classes @tu lazy val StringClass: ClassSymbol = requiredClass("java.lang.String") def StringType: Type = StringClass.typeRef @@ -732,6 +721,10 @@ class Definitions { } def JavaEnumType = JavaEnumClass.typeRef + @tu lazy val MethodHandleClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandle") + @tu lazy val MethodHandlesLookupClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandles.Lookup") + @tu lazy val VarHandleClass: ClassSymbol = requiredClass("java.lang.invoke.VarHandle") + @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass @@ -808,6 +801,8 @@ class Definitions { @tu lazy val ClassTagModule: Symbol = ClassTagClass.companionModule @tu lazy val ClassTagModule_apply: Symbol = ClassTagModule.requiredMethod(nme.apply) + @tu lazy val ReflectSelectableTypeRef: TypeRef = requiredClassRef("scala.reflect.Selectable") + @tu lazy val TypeTestClass: ClassSymbol = requiredClass("scala.reflect.TypeTest") @tu lazy val TypeTest_unapply: Symbol = TypeTestClass.requiredMethod(nme.unapply) @tu lazy val TypeTestModule_identity: Symbol = 
TypeTestClass.companionModule.requiredMethod(nme.identity) @@ -856,7 +851,12 @@ class Definitions { @tu lazy val QuoteMatchingClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteMatching") @tu lazy val QuoteMatching_ExprMatch: Symbol = QuoteMatchingClass.requiredMethod("ExprMatch") + @tu lazy val QuoteMatching_ExprMatchModule: Symbol = QuoteMatchingClass.requiredClass("ExprMatchModule") @tu lazy val QuoteMatching_TypeMatch: Symbol = QuoteMatchingClass.requiredMethod("TypeMatch") + @tu lazy val QuoteMatching_TypeMatchModule: Symbol = QuoteMatchingClass.requiredClass("TypeMatchModule") + @tu lazy val QuoteMatchingModule: Symbol = requiredModule("scala.quoted.runtime.QuoteMatching") + @tu lazy val QuoteMatching_KNil: Symbol = QuoteMatchingModule.requiredType("KNil") + @tu lazy val QuoteMatching_KCons: Symbol = QuoteMatchingModule.requiredType("KCons") @tu lazy val ToExprModule: Symbol = requiredModule("scala.quoted.ToExpr") @tu lazy val ToExprModule_BooleanToExpr: Symbol = ToExprModule.requiredMethod("BooleanToExpr") @@ -889,6 +889,8 @@ class Definitions { @tu lazy val QuotedTypeModule: Symbol = QuotedTypeClass.companionModule @tu lazy val QuotedTypeModule_of: Symbol = QuotedTypeModule.requiredMethod("of") + @tu lazy val MacroAnnotationClass: ClassSymbol = requiredClass("scala.annotation.MacroAnnotation") + @tu lazy val CanEqualClass: ClassSymbol = getClassIfDefined("scala.Eql").orElse(requiredClass("scala.CanEqual")).asClass def CanEqual_canEqualAny(using Context): TermSymbol = val methodName = if CanEqualClass.name == tpnme.Eql then nme.eqlAny else nme.canEqualAny @@ -943,36 +945,46 @@ class Definitions { @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") @tu lazy val RuntimeTuplesModuleClass: Symbol = RuntimeTuplesModule.moduleClass - lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") - lazy val RuntimeTuples_concatIterator: Symbol = 
RuntimeTuplesModule.requiredMethod("concatIterator") - lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") - lazy val RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") - lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") - lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") - lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") - lazy val RuntimeTuples_toArray: Symbol = RuntimeTuplesModule.requiredMethod("toArray") - lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") - lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") - lazy val RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") - lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") + @tu lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") + @tu lazy val RuntimeTuples_concatIterator: Symbol = RuntimeTuplesModule.requiredMethod("concatIterator") + @tu lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") + @tu lazy val RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") + @tu lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") + @tu lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") + @tu lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") + @tu lazy val RuntimeTuples_toArray: Symbol = RuntimeTuplesModule.requiredMethod("toArray") + @tu lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") + @tu lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") + @tu lazy val 
RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") + @tu lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") @tu lazy val TupledFunctionTypeRef: TypeRef = requiredClassRef("scala.util.TupledFunction") def TupledFunctionClass(using Context): ClassSymbol = TupledFunctionTypeRef.symbol.asClass def RuntimeTupleFunctionsModule(using Context): Symbol = requiredModule("scala.runtime.TupledFunctions") + @tu lazy val boundaryModule: Symbol = requiredModule("scala.util.boundary") + @tu lazy val LabelClass: Symbol = requiredClass("scala.util.boundary.Label") + @tu lazy val BreakClass: Symbol = requiredClass("scala.util.boundary.Break") + + @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") + @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") + @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") + @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") + @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") + @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") + // Annotation base classes @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") - @tu lazy val ClassfileAnnotationClass: ClassSymbol = requiredClass("scala.annotation.ClassfileAnnotation") @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") // Annotation classes + @tu lazy val AllowConversionsAnnot: ClassSymbol = requiredClass("scala.annotation.allowConversions") @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") @tu lazy val BeanPropertyAnnot: ClassSymbol = 
requiredClass("scala.beans.BeanProperty") @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability") - @tu lazy val CaptureCheckedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.CaptureChecked") @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child") @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount") @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") @@ -984,8 +996,10 @@ class Definitions { @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") @tu lazy val InvariantBetweenAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InvariantBetween") @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") + @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") @tu lazy val NowarnAnnot: ClassSymbol = requiredClass("scala.annotation.nowarn") + @tu lazy val UnusedAnnot: ClassSymbol = requiredClass("scala.annotation.unused") @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") @@ -1007,10 +1021,15 @@ class Definitions { @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") @tu lazy val VolatileAnnot: 
ClassSymbol = requiredClass("scala.volatile") + @tu lazy val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") + @tu lazy val BeanGetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanGetter") + @tu lazy val BeanSetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanSetter") @tu lazy val FieldMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.field") @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") @tu lazy val SetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.setter") + @tu lazy val CompanionClassMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.companionClass") + @tu lazy val CompanionMethodMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.companionMethod") @tu lazy val ShowAsInfixAnnot: ClassSymbol = requiredClass("scala.annotation.showAsInfix") @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") @@ -1023,8 +1042,10 @@ class Definitions { @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") // A list of meta-annotations that are relevant for fields and accessors - @tu lazy val FieldAccessorMetaAnnots: Set[Symbol] = - Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot) + @tu lazy val NonBeanMetaAnnots: Set[Symbol] = + Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot, CompanionClassMetaAnnot, CompanionMethodMetaAnnot) + @tu lazy val MetaAnnots: Set[Symbol] = + NonBeanMetaAnnots + BeanGetterMetaAnnot + BeanSetterMetaAnnot // A list of annotations that are commonly used to indicate that a field/method argument or return // type is not null. 
These annotations are used by the nullification logic in JavaNullInterop to @@ -1074,15 +1095,23 @@ class Definitions { sym.owner.linkedClass.typeRef object FunctionOf { - def apply(args: List[Type], resultType: Type, isContextual: Boolean = false, isErased: Boolean = false)(using Context): Type = - FunctionType(args.length, isContextual, isErased).appliedTo(args ::: resultType :: Nil) - def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean, Boolean)] = { - val tsym = ft.typeSymbol - if isFunctionClass(tsym) && ft.isRef(tsym) then - val targs = ft.dealias.argInfos - if (targs.isEmpty) None - else Some(targs.init, targs.last, tsym.name.isContextFunction, tsym.name.isErasedFunction) - else None + def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type = + val mt = MethodType.companion(isContextual, false)(args, resultType) + if mt.hasErasedParams then + RefinedType(ErasedFunctionClass.typeRef, nme.apply, mt) + else + FunctionType(args.length, isContextual).appliedTo(args ::: resultType :: Nil) + def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean)] = { + ft.dealias match + case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => + Some(mt.paramInfos, mt.resType, mt.isContextualMethod) + case _ => + val tsym = ft.dealias.typeSymbol + if isFunctionSymbol(tsym) && ft.isRef(tsym) then + val targs = ft.dealias.argInfos + if (targs.isEmpty) None + else Some(targs.init, targs.last, tsym.name.isContextFunction) + else None } } @@ -1154,7 +1183,7 @@ class Definitions { /** Extractor for context function types representing by-name parameters, of the form * `() ?=> T`. - * Under -Ycc, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`. + * Under purefunctions, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`. 
*/ object ByNameFunction: def apply(tp: Type)(using Context): Type = tp match @@ -1341,6 +1370,15 @@ class Definitions { @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) + /** Base classes that are assumed to be pure for the purposes of capture checking. + * Every class inheriting from a pure baseclass is pure. + */ + @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass) + + /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking. + */ + @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) @@ -1383,8 +1421,8 @@ class Definitions { val classRefs1 = new Array[TypeRef | Null](classRefs.length * 2) Array.copy(classRefs, 0, classRefs1, 0, classRefs.length) classRefs = classRefs1 - val funName = s"scala.$prefix$n" if classRefs(n) == null then + val funName = s"scala.$prefix$n" classRefs(n) = if prefix.startsWith("Impure") then staticRef(funName.toTypeName).symbol.typeRef @@ -1392,24 +1430,22 @@ class Definitions { classRefs(n).nn end FunType - private def funTypeIdx(isContextual: Boolean, isErased: Boolean, isImpure: Boolean): Int = + private def funTypeIdx(isContextual: Boolean, isImpure: Boolean): Int = (if isContextual then 1 else 0) - + (if isErased then 2 else 0) - + (if isImpure then 4 else 0) + + (if isImpure then 2 else 0) private val funTypeArray: IArray[FunType] = val arr = Array.ofDim[FunType](8) val choices = List(false, true) - for contxt <- choices; erasd <- choices; impure <- choices do + for contxt <- choices; impure <- choices do var str = "Function" if contxt then
str = "Context" + str - if erasd then str = "Erased" + str if impure then str = "Impure" + str - arr(funTypeIdx(contxt, erasd, impure)) = FunType(str) + arr(funTypeIdx(contxt, impure)) = FunType(str) IArray.unsafeFromArray(arr) - def FunctionSymbol(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = - funTypeArray(funTypeIdx(isContextual, isErased, isImpure))(n).symbol + def FunctionSymbol(n: Int, isContextual: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = + funTypeArray(funTypeIdx(isContextual, isImpure))(n).symbol @tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply) @tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply) @@ -1419,12 +1455,14 @@ class Definitions { @tu lazy val Function2: Symbol = FunctionSymbol(2) @tu lazy val ContextFunction0: Symbol = FunctionSymbol(0, isContextual = true) - def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = - FunctionSymbol(n, isContextual && !ctx.erasedTypes, isErased, isImpure).typeRef + def FunctionType(n: Int, isContextual: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = + FunctionSymbol(n, isContextual && !ctx.erasedTypes, isImpure).typeRef lazy val PolyFunctionClass = requiredClass("scala.PolyFunction") def PolyFunctionType = PolyFunctionClass.typeRef + lazy val ErasedFunctionClass = requiredClass("scala.runtime.ErasedFunction") + /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */ def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass => @@ -1457,8 +1495,6 @@ class Definitions { * - FunctionXXL * - FunctionN for N >= 0 * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 */ def isFunctionClass(cls: Symbol): 
Boolean = scalaClassName(cls).isFunction @@ -1477,12 +1513,6 @@ class Definitions { */ def isContextFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isContextFunction - /** Is an erased function class. - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isErasedFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isErasedFunction - /** Is either FunctionXXL or a class that will be erased to FunctionXXL * - FunctionXXL * - FunctionN for N >= 22 @@ -1519,8 +1549,7 @@ class Definitions { */ def functionTypeErasure(cls: Symbol): Type = val arity = scalaClassName(cls).functionArity - if cls.name.isErasedFunction then FunctionType(0) - else if arity > 22 then FunctionXXLClass.typeRef + if arity > 22 then FunctionXXLClass.typeRef else if arity >= 0 then FunctionType(arity) else NoType @@ -1535,12 +1564,21 @@ class Definitions { private val PredefImportFns: RootRef = RootRef(() => ScalaPredefModule.termRef, isPredef=true) - @tu private lazy val JavaRootImportFns: List[RootRef] = - if ctx.settings.YnoImports.value then Nil - else JavaImportFns + @tu private lazy val YimportsImportFns: List[RootRef] = ctx.settings.Yimports.value.map { name => + val denot = + getModuleIfDefined(name).suchThat(_.is(Module)) `orElse` + getPackageClassIfDefined(name).suchThat(_.is(Package)) + if !denot.exists then + report.error(s"error: bad preamble import $name") + val termRef = denot.symbol.termRef + RootRef(() => termRef) + } + + @tu private lazy val JavaRootImportFns: List[RootRef] = JavaImportFns @tu private lazy val ScalaRootImportFns: List[RootRef] = - if ctx.settings.YnoImports.value then Nil + if !ctx.settings.Yimports.isDefault then YimportsImportFns + else if ctx.settings.YnoImports.value then Nil else if ctx.settings.YnoPredef.value then ScalaImportFns else ScalaImportFns :+ PredefImportFns @@ -1630,6 +1668,15 @@ class Definitions { rec(tp.stripTypeVar, Nil, bound) } + def isSmallGenericTuple(tp: Type)(using Context): Boolean = + if 
tp.derivesFrom(defn.PairClass) && !defn.isTupleNType(tp.widenDealias) then + // If this is a generic tuple we need to cast it to make the TupleN/ members accessible. + // This works only for generic tuples of known size up to 22. + defn.tupleTypes(tp.widenTermRefExpr) match + case Some(elems) if elems.length <= Definitions.MaxTupleArity => true + case _ => false + else false + def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass) /** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN @@ -1642,16 +1689,29 @@ class Definitions { arity >= 0 && isFunctionClass(sym) && tp.isRef( - FunctionType(arity, sym.name.isContextFunction, sym.name.isErasedFunction).typeSymbol, + FunctionType(arity, sym.name.isContextFunction).typeSymbol, skipRefined = false) end isNonRefinedFunction - /** Is `tp` a representation of a (possibly dependent) function type or an alias of such? */ + /** Returns whether `tp` is an instance or a refined instance of: + * - scala.FunctionN + * - scala.ContextFunctionN + */ def isFunctionType(tp: Type)(using Context): Boolean = isNonRefinedFunction(tp.dropDependentRefinement) + /** Is `tp` a specialized, refined function type? Either an `ErasedFunction` or a `PolyFunction`. 
*/ + def isRefinedFunctionType(tp: Type)(using Context): Boolean = + tp.derivesFrom(defn.PolyFunctionClass) || isErasedFunctionType(tp) + + /** Returns whether `tp` is an instance or a refined instance of: + * - scala.FunctionN + * - scala.ContextFunctionN + * - ErasedFunction + * - PolyFunction + */ def isFunctionOrPolyType(tp: Type)(using Context): Boolean = - isFunctionType(tp) || (tp.typeSymbol eq defn.PolyFunctionClass) + isFunctionType(tp) || isRefinedFunctionType(tp) private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = for base <- bases; tp <- paramTypes do @@ -1740,7 +1800,7 @@ class Definitions { @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames - def functionArity(tp: Type)(using Context): Int = tp.dropDependentRefinement.dealias.argInfos.length - 1 + def functionArity(tp: Type)(using Context): Int = tp.functionArgInfos.length - 1 /** Return underlying context function type (i.e. instance of an ContextFunctionN class) * or NoType if none exists. The following types are considered as underlying types: @@ -1752,6 +1812,8 @@ class Definitions { tp.stripTypeVar.dealias match case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => asContextFunctionType(TypeComparer.bounds(tp1).hiBound) + case tp1 @ RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) && mt.isContextualMethod => + tp1 case tp1 => if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 else NoType @@ -1765,18 +1827,28 @@ class Definitions { * types `As`, the result type `B` and a whether the type is an erased context function. 
*/ object ContextFunctionType: - def unapply(tp: Type)(using Context): Option[(List[Type], Type, Boolean)] = + def unapply(tp: Type)(using Context): Option[(List[Type], Type, List[Boolean])] = if ctx.erasedTypes then atPhase(erasurePhase)(unapply(tp)) else - val tp1 = asContextFunctionType(tp) - if tp1.exists then - val args = tp1.dropDependentRefinement.argInfos - Some((args.init, args.last, tp1.typeSymbol.name.isErasedFunction)) - else None + asContextFunctionType(tp) match + case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => + Some((mt.paramInfos, mt.resType, mt.erasedParams)) + case tp1 if tp1.exists => + val args = tp1.functionArgInfos + val erasedParams = erasedFunctionParameters(tp1) + Some((args.init, args.last, erasedParams)) + case _ => None + + /* Returns a list of erased booleans marking whether parameters are erased, for a function type. */ + def erasedFunctionParameters(tp: Type)(using Context): List[Boolean] = tp.dealias match { + case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams + case tp if isFunctionType(tp) => List.fill(functionArity(tp)) { false } + case _ => Nil + } def isErasedFunctionType(tp: Type)(using Context): Boolean = - tp.dealias.typeSymbol.name.isErasedFunction && isFunctionType(tp) + tp.derivesFrom(defn.ErasedFunctionClass) /** A whitelist of Scala-2 classes that are known to be pure */ def isAssuredNoInits(sym: Symbol): Boolean = @@ -1821,20 +1893,53 @@ class Definitions { def isInfix(sym: Symbol)(using Context): Boolean = (sym eq Object_eq) || (sym eq Object_ne) - @tu lazy val assumedTransparentTraits = - Set[Symbol](ComparableClass, ProductClass, SerializableClass, - // add these for now, until we had a chance to retrofit 2.13 stdlib - // we should do a more through sweep through it then. 
- requiredClass("scala.collection.SortedOps"), - requiredClass("scala.collection.StrictOptimizedSortedSetOps"), - requiredClass("scala.collection.generic.DefaultSerializable"), - requiredClass("scala.collection.generic.IsIterable"), - requiredClass("scala.collection.generic.IsIterableOnce"), - requiredClass("scala.collection.generic.IsMap"), - requiredClass("scala.collection.generic.IsSeq"), - requiredClass("scala.collection.generic.Subtractable"), - requiredClass("scala.collection.immutable.StrictOptimizedSeqOps") - ) + @tu lazy val assumedTransparentNames: Map[Name, Set[Symbol]] = + // add these for now, until we had a chance to retrofit 2.13 stdlib + // we should do a more through sweep through it then. + val strs = Map( + "Any" -> Set("scala"), + "AnyVal" -> Set("scala"), + "Matchable" -> Set("scala"), + "Product" -> Set("scala"), + "Object" -> Set("java.lang"), + "Comparable" -> Set("java.lang"), + "Serializable" -> Set("java.io"), + "BitSetOps" -> Set("scala.collection"), + "IndexedSeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "IterableOnceOps" -> Set("scala.collection"), + "IterableOps" -> Set("scala.collection"), + "LinearSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "MapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedMapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedOps" -> Set("scala.collection"), + "SortedSetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "StrictOptimizedIterableOps" -> Set("scala.collection"), + "StrictOptimizedLinearSeqOps" -> Set("scala.collection"), + "StrictOptimizedMapOps" -> Set("scala.collection", 
"scala.collection.immutable"), + "StrictOptimizedSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedMapOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "ArrayDequeOps" -> Set("scala.collection.mutable"), + "DefaultSerializable" -> Set("scala.collection.generic"), + "IsIterable" -> Set("scala.collection.generic"), + "IsIterableLowPriority" -> Set("scala.collection.generic"), + "IsIterableOnce" -> Set("scala.collection.generic"), + "IsIterableOnceLowPriority" -> Set("scala.collection.generic"), + "IsMap" -> Set("scala.collection.generic"), + "IsSeq" -> Set("scala.collection.generic")) + strs.map { case (simple, pkgs) => ( + simple.toTypeName, + pkgs.map(pkg => staticRef(pkg.toTermName, isPackage = true).symbol.moduleClass) + ) + } + + def isAssumedTransparent(sym: Symbol): Boolean = + assumedTransparentNames.get(sym.name) match + case Some(pkgs) => pkgs.contains(sym.owner) + case none => false // ----- primitive value class machinery ------------------------------------------ @@ -1956,6 +2061,7 @@ class Definitions { orType, RepeatedParamClass, ByNameParamClass2x, + IntoType, AnyValClass, NullClass, NothingClass, @@ -1977,14 +2083,19 @@ class Definitions { this.initCtx = ctx if (!isInitialized) { // force initialization of every symbol that is synthesized or hijacked by the compiler - val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() - ++ (JavaEnumClass :: (if ctx.settings.Ycc.value then captureRoot :: Nil else Nil)) - + val forced = + syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass isInitialized = true } addSyntheticSymbolsComments } + /** Definitions used in Lazy Vals implementation */ + val LazyValsModuleName = "scala.runtime.LazyVals" + @tu lazy val LazyValsModule = 
requiredModule(LazyValsModuleName) + @tu lazy val LazyValsWaitingState = requiredClass(s"$LazyValsModuleName.Waiting") + @tu lazy val LazyValsControlState = requiredClass(s"$LazyValsModuleName.LazyValControlState") + def addSyntheticSymbolsComments(using Context): Unit = def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index a35f4d4c20c4..82368fd4dbf5 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -175,7 +175,7 @@ object Denotations { * * @param symbol The referencing symbol, or NoSymbol is none exists */ - abstract class Denotation(val symbol: Symbol, protected var myInfo: Type) extends PreDenotation with printing.Showable { + abstract class Denotation(val symbol: Symbol, protected var myInfo: Type, val isType: Boolean) extends PreDenotation with printing.Showable { type AsSeenFromResult <: Denotation /** The type info. @@ -194,12 +194,6 @@ object Denotations { */ def infoOrCompleter: Type - /** The period during which this denotation is valid. */ - def validFor: Period - - /** Is this a reference to a type symbol? */ - def isType: Boolean - /** Is this a reference to a term symbol? */ def isTerm: Boolean = !isType @@ -229,6 +223,15 @@ object Denotations { */ def current(using Context): Denotation + /** The period during which this denotation is valid. */ + private var myValidFor: Period = Nowhere + + final def validFor: Period = myValidFor + final def validFor_=(p: Period): Unit = { + myValidFor = p + symbol.invalidateDenotCache() + } + /** Is this denotation different from NoDenotation or an ErrorDenotation? 
*/ def exists: Boolean = true @@ -300,9 +303,9 @@ object Denotations { case NoDenotation | _: NoQualifyingRef | _: MissingRef => def argStr = if (args.isEmpty) "" else i" matching ($args%, %)" val msg = - if (site.exists) i"$site does not have a member $kind $name$argStr" - else i"missing: $kind $name$argStr" - throw new TypeError(msg) + if site.exists then em"$site does not have a member $kind $name$argStr" + else em"missing: $kind $name$argStr" + throw TypeError(msg) case denot => denot.symbol } @@ -542,8 +545,7 @@ object Denotations { tp2 match case tp2: MethodType if TypeComparer.matchingMethodParams(tp1, tp2) - && tp1.isImplicitMethod == tp2.isImplicitMethod - && tp1.isErasedMethod == tp2.isErasedMethod => + && tp1.isImplicitMethod == tp2.isImplicitMethod => val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) if resType.exists then tp1.derivedLambdaType(mergeParamNames(tp1, tp2), tp1.paramInfos, resType) @@ -571,7 +573,7 @@ object Denotations { end infoMeet /** A non-overloaded denotation */ - abstract class SingleDenotation(symbol: Symbol, initInfo: Type) extends Denotation(symbol, initInfo) { + abstract class SingleDenotation(symbol: Symbol, initInfo: Type, isType: Boolean) extends Denotation(symbol, initInfo, isType) { protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation final def name(using Context): Name = symbol.name @@ -610,16 +612,13 @@ object Denotations { */ def signature(sourceLanguage: SourceLanguage)(using Context): Signature = if (isType) Signature.NotAMethod // don't force info if this is a type denotation - else info match { + else info match case info: MethodOrPoly => try info.signature(sourceLanguage) - catch { // !!! 
DEBUG - case scala.util.control.NonFatal(ex) => - report.echo(s"cannot take signature of $info") - throw ex - } + catch case ex: Exception => + if ctx.debug then report.echo(s"cannot take signature of $info") + throw ex case _ => Signature.NotAMethod - } def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this @@ -644,15 +643,19 @@ object Denotations { def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): SingleDenotation = val situated = if site == NoPrefix then this else asSeenFrom(site) - val sigMatches = sig.matchDegree(situated.signature) match - case FullMatch => - true - case MethodNotAMethodMatch => - // See comment in `matches` - relaxed && !symbol.is(JavaDefined) - case ParamMatch => - relaxed - case noMatch => + val sigMatches = + try + sig.matchDegree(situated.signature) match + case FullMatch => + true + case MethodNotAMethodMatch => + // See comment in `matches` + relaxed && !symbol.is(JavaDefined) + case ParamMatch => + relaxed + case noMatch => + false + catch case ex: MissingType => false if sigMatches && symbol.hasTargetName(targetName) then this else NoDenotation @@ -663,14 +666,6 @@ object Denotations { // ------ Transformations ----------------------------------------- - private var myValidFor: Period = Nowhere - - def validFor: Period = myValidFor - def validFor_=(p: Period): Unit = { - myValidFor = p - symbol.invalidateDenotCache() - } - /** The next SingleDenotation in this run, with wrap-around from last to first. 
* * There may be several `SingleDenotation`s with different validity @@ -694,7 +689,7 @@ object Denotations { if (validFor.firstPhaseId <= 1) this else { var current = nextInRun - while (current.validFor.code > this.myValidFor.code) current = current.nextInRun + while (current.validFor.code > this.validFor.code) current = current.nextInRun current } @@ -775,7 +770,7 @@ object Denotations { * are otherwise undefined. */ def skipRemoved(using Context): SingleDenotation = - if (myValidFor.code <= 0) nextDefined else this + if (validFor.code <= 0) nextDefined else this /** Produce a denotation that is valid for the given context. * Usually called when !(validFor contains ctx.period) @@ -792,15 +787,13 @@ object Denotations { def current(using Context): SingleDenotation = util.Stats.record("current") val currentPeriod = ctx.period - val valid = myValidFor + val valid = validFor def assertNotPackage(d: SingleDenotation, transformer: DenotTransformer) = d match case d: ClassDenotation => assert(!d.is(Package), s"illegal transformation of package denotation by transformer $transformer") case _ => - def escapeToNext = nextDefined.ensuring(_.validFor != Nowhere) - def toNewRun = util.Stats.record("current.bringForward") if exists then initial.bringForward().current else this @@ -836,9 +829,6 @@ object Denotations { // creations that way, and also avoid phase caches in contexts to get large. // To work correctly, we need to demand that the context with the new phase // is not retained in the result. - catch case ex: CyclicReference => - // println(s"error while transforming $this") - throw ex finally mutCtx.setPeriod(savedPeriod) if next eq cur then @@ -875,7 +865,7 @@ object Denotations { // can happen if we sit on a stale denotation which has been replaced // wholesale by an installAfter; in this case, proceed to the next // denotation and try again. 
- escapeToNext + nextDefined else if valid.runId != currentPeriod.runId then toNewRun else if currentPeriod.code > valid.code then @@ -962,7 +952,7 @@ object Denotations { case denot: SymDenotation => s"in ${denot.owner}" case _ => "" } - s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}" + s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${validFor}, is referred to in run ${ctx.period}" } /** The period (interval of phases) for which there exists @@ -1076,6 +1066,7 @@ object Denotations { def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = f(this) type AsSeenFromResult = SingleDenotation + protected def computeAsSeenFrom(pre: Type)(using Context): SingleDenotation = { val symbol = this.symbol val owner = this match { @@ -1120,20 +1111,36 @@ object Denotations { then this else if symbol.isAllOf(ClassTypeParam) then val arg = symbol.typeRef.argForParam(pre, widenAbstract = true) - if arg.exists then - val newBounds = - if symbol.isCompleted && !symbol.info.containsLazyRefs - then symbol.info.bounds & arg.bounds - else arg.bounds - derivedSingleDenotation(symbol, newBounds, pre) + if arg.exists + then derivedSingleDenotation(symbol, normalizedArgBounds(arg.bounds), pre) else derived(symbol.info) else derived(symbol.info) } + + /** The argument bounds, possibly intersected with the parameter's info TypeBounds, + * if the latter is not F-bounded and does not refer to other type parameters + * of the same class, and the intersection is provably nonempty. 
+ */ + private def normalizedArgBounds(argBounds: TypeBounds)(using Context): TypeBounds = + if symbol.isCompleted && !hasBoundsDependingOnParamsOf(symbol.owner) then + val combined @ TypeBounds(lo, hi) = symbol.info.bounds & argBounds + if (lo frozen_<:< hi) then combined + else argBounds + else argBounds + + private def hasBoundsDependingOnParamsOf(cls: Symbol)(using Context): Boolean = + val acc = new TypeAccumulator[Boolean]: + def apply(x: Boolean, tp: Type): Boolean = tp match + case _: LazyRef => true + case tp: TypeRef + if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls => true + case _ => foldOver(x, tp) + acc(false, symbol.info) } - abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) extends SingleDenotation(symbol, initInfo) { + abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) + extends SingleDenotation(symbol, initInfo, initInfo.isInstanceOf[TypeType]) { def infoOrCompleter: Type = initInfo - def isType: Boolean = infoOrCompleter.isInstanceOf[TypeType] } class UniqueRefDenotation( @@ -1229,10 +1236,10 @@ object Denotations { /** An overloaded denotation consisting of the alternatives of both given denotations. 
*/ - case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType) with MultiPreDenotation { + case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType, isType = false) with MultiPreDenotation { + validFor = denot1.validFor & denot2.validFor + final def infoOrCompleter: Type = multiHasNot("info") - final def validFor: Period = denot1.validFor & denot2.validFor - final def isType: Boolean = false final def hasUniqueSym: Boolean = false final def name(using Context): Name = denot1.name final def signature(using Context): Signature = Signature.OverloadedSignature diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 72428d02f5d3..f23dce020f10 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -350,14 +350,14 @@ object Flags { /** Symbol is a method which should be marked ACC_SYNCHRONIZED */ val (_, Synchronized @ _, _) = newFlags(36, "") - /** Symbol is a Java-style varargs method */ - val (_, JavaVarargs @ _, _) = newFlags(37, "") + /** Symbol is a Java-style varargs method / a Java annotation */ + val (_, JavaVarargs @ _, JavaAnnotation @ _) = newFlags(37, "", "") /** Symbol is a Java default method */ val (_, DefaultMethod @ _, _) = newFlags(38, "") /** Symbol is a transparent inline method or trait */ - val (Transparent @ _, _, _) = newFlags(39, "transparent") + val (Transparent @ _, _, TransparentType @ _) = newFlags(39, "transparent") /** Symbol is an enum class or enum case (if used with case) */ val (Enum @ _, EnumVal @ _, _) = newFlags(40, "enum") @@ -477,7 +477,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) /** A value that's unstable unless 
complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method @@ -571,6 +571,7 @@ object Flags { val GivenOrImplicit: FlagSet = Given | Implicit val GivenOrImplicitVal: FlagSet = GivenOrImplicit.toTermFlags val GivenMethod: FlagSet = Given | Method + val LazyGiven: FlagSet = Given | Lazy val InlineOrProxy: FlagSet = Inline | InlineProxy // An inline method or inline argument proxy */ val InlineMethod: FlagSet = Inline | Method val InlineParam: FlagSet = Inline | Param @@ -608,5 +609,4 @@ object Flags { val SyntheticParam: FlagSet = Synthetic | Param val SyntheticTermParam: FlagSet = Synthetic | TermParam val SyntheticTypeParam: FlagSet = Synthetic | TypeParam - val TransparentTrait: FlagSet = Trait | Transparent } diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index d8e1c5276ab6..bb65cce84042 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -2,93 +2,171 @@ package dotty.tools package dotc package core -import Decorators._ -import Contexts._ -import Types._ -import Symbols._ +import Contexts.*, Decorators.*, Symbols.*, Types.* +import NameKinds.UniqueName +import config.Printers.{gadts, gadtsConstr} import util.{SimpleIdentitySet, SimpleIdentityMap} -import collection.mutable import printing._ +import scala.annotation.tailrec import scala.annotation.internal.sharable +import scala.collection.mutable + +object GadtConstraint: + @sharable val empty: GadtConstraint = + GadtConstraint(OrderingConstraint.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, false) /** Represents GADT constraints currently in scope */ -sealed abstract class GadtConstraint extends Showable { - /** Immediate bounds of `sym`. Does not contain lower/upper symbols (see [[fullBounds]]). 
*/ - def bounds(sym: Symbol)(using Context): TypeBounds | Null +class GadtConstraint private ( + private val myConstraint: Constraint, + private val mapping: SimpleIdentityMap[Symbol, TypeVar], + private val reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol], + private val wasConstrained: Boolean, +) extends Showable: + def constraint: Constraint = myConstraint + def symbols: List[Symbol] = mapping.keys + def withConstraint(c: Constraint) = copy(myConstraint = c) + def withWasConstrained = copy(wasConstrained = true) + + def add(sym: Symbol, tv: TypeVar): GadtConstraint = copy( + mapping = mapping.updated(sym, tv), + reverseMapping = reverseMapping.updated(tv.origin, sym), + ) + + /** Is `sym1` ordered to be less than `sym2`? */ + def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = + constraint.isLess(tvarOrError(sym1).origin, tvarOrError(sym2).origin) /** Full bounds of `sym`, including TypeRefs to other lower/upper symbols. * * @note this performs subtype checks between ordered symbols. * Using this in isSubType can lead to infinite recursion. Consider `bounds` instead. */ - def fullBounds(sym: Symbol)(using Context): TypeBounds | Null - - /** Is `sym1` ordered to be less than `sym2`? */ - def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean - - /** Add symbols to constraint, correctly handling inter-dependencies. - * - * @see [[ConstraintHandling.addToConstraint]] - */ - def addToConstraint(syms: List[Symbol])(using Context): Boolean - def addToConstraint(sym: Symbol)(using Context): Boolean = addToConstraint(sym :: Nil) + def fullBounds(sym: Symbol)(using Context): TypeBounds | Null = mapping(sym) match + case null => null + case tv: TypeVar => fullBounds(tv.origin) // .ensuring(containsNoInternalTypes(_)) - /** Further constrain a symbol already present in the constraint. */ - def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean + /** Immediate bounds of `sym`. 
Does not contain lower/upper symbols (see [[fullBounds]]). */ + def bounds(sym: Symbol)(using Context): TypeBounds | Null = + mapping(sym) match + case null => null + case tv: TypeVar => + def retrieveBounds: TypeBounds = externalize(constraint.bounds(tv.origin)).bounds + retrieveBounds + //.showing(i"gadt bounds $sym: $result", gadts) + //.ensuring(containsNoInternalTypes(_)) /** Is the symbol registered in the constraint? * * @note this is true even if the symbol is constrained to be equal to another type, unlike [[Constraint.contains]]. */ - def contains(sym: Symbol)(using Context): Boolean + def contains(sym: Symbol)(using Context): Boolean = mapping(sym) != null /** GADT constraint narrows bounds of at least one variable */ - def isNarrowing: Boolean + def isNarrowing: Boolean = wasConstrained - /** See [[ConstraintHandling.approximation]] */ - def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type + def fullBounds(param: TypeParamRef)(using Context): TypeBounds = + nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) - def symbols: List[Symbol] + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = + externalize(constraint.nonParamBounds(param)).bounds - def fresh: GadtConstraint + def fullLowerBound(param: TypeParamRef)(using Context): Type = + val self = externalize(param) + constraint.minLower(param).foldLeft(nonParamBounds(param).lo) { (acc, p) => + externalize(p) match + // drop any lower param that is a GADT symbol + // and is upper-bounded by a non-Any super-type of the original parameter + // e.g. in pos/i14287.min + // B$1 had info <: X and fullBounds >: B$2 <: X, and + // B$2 had info <: B$1 and fullBounds <: B$1 + // We can use the info of B$2 to drop the lower-bound of B$1 + // and return non-bidirectional bounds B$1 <: X and B$2 <: B$1. 
+ case tp: TypeRef if tp.symbol.isPatternBound && self =:= tp.info.hiBound => acc + case tp => acc | tp + } - /** Restore the state from other [[GadtConstraint]], probably copied using [[fresh]] */ - def restore(other: GadtConstraint): Unit + def fullUpperBound(param: TypeParamRef)(using Context): Type = + val self = externalize(param) + constraint.minUpper(param).foldLeft(nonParamBounds(param).hi) { (acc, u) => + externalize(u) match + case tp: TypeRef if tp.symbol.isPatternBound && self =:= tp.info.loBound => acc // like fullLowerBound + case tp => + // Any as the upper bound means "no bound", but if F is higher-kinded, + // Any & F = F[_]; this is wrong for us so we need to short-circuit + if acc.isAny then tp else acc & tp + } - def debugBoundsDescription(using Context): String -} + def externalize(tp: Type, theMap: TypeMap | Null = null)(using Context): Type = tp match + case param: TypeParamRef => reverseMapping(param) match + case sym: Symbol => sym.typeRef + case null => param + case tp: TypeAlias => tp.derivedAlias(externalize(tp.alias, theMap)) + case tp => (if theMap == null then ExternalizeMap() else theMap).mapOver(tp) -final class ProperGadtConstraint private( - private var myConstraint: Constraint, - private var mapping: SimpleIdentityMap[Symbol, TypeVar], - private var reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol], - private var wasConstrained: Boolean -) extends GadtConstraint with ConstraintHandling { - import dotty.tools.dotc.config.Printers.{gadts, gadtsConstr} - - def this() = this( - myConstraint = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentitySet.empty), - mapping = SimpleIdentityMap.empty, - reverseMapping = SimpleIdentityMap.empty, - wasConstrained = false - ) + private class ExternalizeMap(using Context) extends TypeMap: + def apply(tp: Type): Type = externalize(tp, this)(using mapCtx) - /** Exposes ConstraintHandling.subsumes */ - def subsumes(left: 
GadtConstraint, right: GadtConstraint, pre: GadtConstraint)(using Context): Boolean = { - def extractConstraint(g: GadtConstraint) = g match { - case s: ProperGadtConstraint => s.constraint - case EmptyGadtConstraint => OrderingConstraint.empty - } - subsumes(extractConstraint(left), extractConstraint(right), extractConstraint(pre)) + def tvarOrError(sym: Symbol)(using Context): TypeVar = + mapping(sym).ensuring(_ != null, i"not a constrainable symbol: $sym").uncheckedNN + + @tailrec final def stripInternalTypeVar(tp: Type): Type = tp match + case tv: TypeVar => + val inst = constraint.instType(tv) + if inst.exists then stripInternalTypeVar(inst) else tv + case _ => tp + + def internalize(tp: Type)(using Context): Type = tp match + case nt: NamedType => + val ntTvar = mapping(nt.symbol) + if ntTvar == null then tp + else ntTvar + case _ => tp + + private def containsNoInternalTypes(tp: Type, theAcc: TypeAccumulator[Boolean] | Null = null)(using Context): Boolean = tp match { + case tpr: TypeParamRef => !reverseMapping.contains(tpr) + case tv: TypeVar => !reverseMapping.contains(tv.origin) + case tp => + (if (theAcc != null) theAcc else new ContainsNoInternalTypesAccumulator()).foldOver(true, tp) + } + + private class ContainsNoInternalTypesAccumulator(using Context) extends TypeAccumulator[Boolean] { + override def apply(x: Boolean, tp: Type): Boolean = x && containsNoInternalTypes(tp, this) } + override def toText(printer: Printer): Texts.Text = printer.toText(this) + + /** Provides more information than toText, by showing the underlying Constraint details. 
*/ + def debugBoundsDescription(using Context): String = i"$this\n$constraint" + + private def copy( + myConstraint: Constraint = myConstraint, + mapping: SimpleIdentityMap[Symbol, TypeVar] = mapping, + reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol] = reverseMapping, + wasConstrained: Boolean = wasConstrained, + ): GadtConstraint = GadtConstraint(myConstraint, mapping, reverseMapping, wasConstrained) +end GadtConstraint + +object GadtState: + def apply(gadt: GadtConstraint): GadtState = ProperGadtState(gadt) + +sealed trait GadtState { + this: ConstraintHandling => // Hide ConstraintHandling within GadtConstraintHandling + + def gadt: GadtConstraint + def gadt_=(g: GadtConstraint): Unit + override protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = // GADT constraints never involve wildcards and are not propagated outside // the case where they're valid, so no approximating is needed. rawBound - override def addToConstraint(params: List[Symbol])(using Context): Boolean = { + /** Add symbols to constraint, correctly handling inter-dependencies. + * + * @see [[ConstraintHandling.addToConstraint]] + */ + def addToConstraint(sym: Symbol)(using Context): Boolean = addToConstraint(sym :: Nil) + def addToConstraint(params: List[Symbol])(using Context): Boolean = { import NameKinds.DepParamName val poly1 = PolyType(params.map { sym => DepParamName.fresh(sym.name.toTypeName) })( @@ -98,22 +176,19 @@ final class ProperGadtConstraint private( // and used as orderings. 
def substDependentSyms(tp: Type, isUpper: Boolean)(using Context): Type = { def loop(tp: Type) = substDependentSyms(tp, isUpper) - tp match { + tp match case tp @ AndType(tp1, tp2) if !isUpper => tp.derivedAndType(loop(tp1), loop(tp2)) case tp @ OrType(tp1, tp2) if isUpper => tp.derivedOrType(loop(tp1), loop(tp2)) case tp: NamedType => - params.indexOf(tp.symbol) match { + params.indexOf(tp.symbol) match case -1 => - mapping(tp.symbol) match { + gadt.internalize(tp) match case tv: TypeVar => tv.origin - case null => tp - } + case _ => tp case i => pt.paramRefs(i) - } case tp => tp - } } val tb = param.info.bounds @@ -127,205 +202,87 @@ final class ProperGadtConstraint private( val tvars = params.lazyZip(poly1.paramRefs).map { (sym, paramRef) => val tv = TypeVar(paramRef, creatorState = null) - mapping = mapping.updated(sym, tv) - reverseMapping = reverseMapping.updated(tv.origin, sym) + gadt = gadt.add(sym, tv) tv } // The replaced symbols are picked up here. addToConstraint(poly1, tvars) - .showing(i"added to constraint: [$poly1] $params%, %\n$debugBoundsDescription", gadts) + .showing(i"added to constraint: [$poly1] $params%, % gadt = $gadt", gadts) } - override def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = { - @annotation.tailrec def stripInternalTypeVar(tp: Type): Type = tp match { - case tv: TypeVar => - val inst = constraint.instType(tv) - if (inst.exists) stripInternalTypeVar(inst) else tv - case _ => tp - } - - val symTvar: TypeVar = stripInternalTypeVar(tvarOrError(sym)) match { + /** Further constrain a symbol already present in the constraint. 
*/ + def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = { + val symTvar: TypeVar = gadt.stripInternalTypeVar(gadt.tvarOrError(sym)) match case tv: TypeVar => tv case inst => gadts.println(i"instantiated: $sym -> $inst") - return if (isUpper) isSub(inst, bound) else isSub(bound, inst) - } + return if isUpper then isSub(inst, bound) else isSub(bound, inst) - val internalizedBound = bound match { - case nt: NamedType => - val ntTvar = mapping(nt.symbol) - if (ntTvar != null) stripInternalTypeVar(ntTvar) else bound - case _ => bound - } + val internalizedBound = gadt.stripInternalTypeVar(gadt.internalize(bound)) val saved = constraint val result = internalizedBound match case boundTvar: TypeVar => - if (boundTvar eq symTvar) true - else if (isUpper) addLess(symTvar.origin, boundTvar.origin) + if boundTvar eq symTvar then true + else if isUpper + then addLess(symTvar.origin, boundTvar.origin) else addLess(boundTvar.origin, symTvar.origin) case bound => addBoundTransitively(symTvar.origin, bound, isUpper) gadts.println { - val descr = if (isUpper) "upper" else "lower" - val op = if (isUpper) "<:" else ">:" + val descr = if isUpper then "upper" else "lower" + val op = if isUpper then "<:" else ">:" i"adding $descr bound $sym $op $bound = $result" } - if constraint ne saved then wasConstrained = true + if constraint ne saved then gadt = gadt.withWasConstrained result } - override def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = - constraint.isLess(tvarOrError(sym1).origin, tvarOrError(sym2).origin) - - override def fullBounds(sym: Symbol)(using Context): TypeBounds | Null = - mapping(sym) match { - case null => null - // TODO: Improve flow typing so that ascription becomes redundant - case tv: TypeVar => - fullBounds(tv.origin) - // .ensuring(containsNoInternalTypes(_)) - } - - override def bounds(sym: Symbol)(using Context): TypeBounds | Null = - mapping(sym) match { - case null => null - // TODO: Improve flow typing so that 
ascription becomes redundant - case tv: TypeVar => - def retrieveBounds: TypeBounds = externalize(bounds(tv.origin)).bounds - retrieveBounds - //.showing(i"gadt bounds $sym: $result", gadts) - //.ensuring(containsNoInternalTypes(_)) - } - - override def contains(sym: Symbol)(using Context): Boolean = mapping(sym) != null - - def isNarrowing: Boolean = wasConstrained - - override def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int)(using Context): Type = { - val res = - approximation(tvarOrError(sym).origin, fromBelow, maxLevel) match - case tpr: TypeParamRef => - // Here we do externalization when the returned type is a TypeParamRef, - // b/c ConstraintHandling.approximation may return internal types when - // the type variable is instantiated. See #15531. - externalize(tpr) - case tp => tp - - gadts.println(i"approximating $sym ~> $res") - res + /** See [[ConstraintHandling.approximation]] */ + def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = { + approximation(gadt.tvarOrError(sym).origin, fromBelow, maxLevel).match + case tpr: TypeParamRef => + // Here we do externalization when the returned type is a TypeParamRef, + // b/c ConstraintHandling.approximation may return internal types when + // the type variable is instantiated. See #15531. + gadt.externalize(tpr) + case tp => tp + .showing(i"approximating $sym ~> $result", gadts) } - override def symbols: List[Symbol] = mapping.keys + def fresh: GadtState = GadtState(gadt) - override def fresh: GadtConstraint = new ProperGadtConstraint( - myConstraint, - mapping, - reverseMapping, - wasConstrained - ) + /** Restore the GadtConstraint state. 
*/ + def restore(gadt: GadtConstraint): Unit = this.gadt = gadt - def restore(other: GadtConstraint): Unit = other match { - case other: ProperGadtConstraint => - this.myConstraint = other.myConstraint - this.mapping = other.mapping - this.reverseMapping = other.reverseMapping - this.wasConstrained = other.wasConstrained - case _ => ; - } + inline def rollbackGadtUnless(inline op: Boolean): Boolean = + val saved = gadt + var result = false + try result = op + finally if !result then restore(saved) + result // ---- Protected/internal ----------------------------------------------- - override protected def constraint = myConstraint - override protected def constraint_=(c: Constraint) = myConstraint = c + override protected def constraint = gadt.constraint + override protected def constraint_=(c: Constraint) = gadt = gadt.withConstraint(c) override protected def isSub(tp1: Type, tp2: Type)(using Context): Boolean = TypeComparer.isSubType(tp1, tp2) override protected def isSame(tp1: Type, tp2: Type)(using Context): Boolean = TypeComparer.isSameType(tp1, tp2) - override def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = - externalize(constraint.nonParamBounds(param)).bounds - - override def fullLowerBound(param: TypeParamRef)(using Context): Type = - constraint.minLower(param).foldLeft(nonParamBounds(param).lo) { - (t, u) => t | externalize(u) - } - - override def fullUpperBound(param: TypeParamRef)(using Context): Type = - constraint.minUpper(param).foldLeft(nonParamBounds(param).hi) { (t, u) => - val eu = externalize(u) - // Any as the upper bound means "no bound", but if F is higher-kinded, - // Any & F = F[_]; this is wrong for us so we need to short-circuit - if t.isAny then eu else t & eu - } - - // ---- Private ---------------------------------------------------------- - - private def externalize(tp: Type, theMap: TypeMap | Null = null)(using Context): Type = tp match - case param: TypeParamRef => reverseMapping(param) match - case sym: Symbol 
=> sym.typeRef - case null => param - case tp: TypeAlias => tp.derivedAlias(externalize(tp.alias, theMap)) - case tp => (if theMap == null then ExternalizeMap() else theMap).mapOver(tp) - - private class ExternalizeMap(using Context) extends TypeMap: - def apply(tp: Type): Type = externalize(tp, this)(using mapCtx) - - private def tvarOrError(sym: Symbol)(using Context): TypeVar = - mapping(sym).ensuring(_ != null, i"not a constrainable symbol: $sym").uncheckedNN - - private def containsNoInternalTypes(tp: Type, theAcc: TypeAccumulator[Boolean] | Null = null)(using Context): Boolean = tp match { - case tpr: TypeParamRef => !reverseMapping.contains(tpr) - case tv: TypeVar => !reverseMapping.contains(tv.origin) - case tp => - (if (theAcc != null) theAcc else new ContainsNoInternalTypesAccumulator()).foldOver(true, tp) - } - - private class ContainsNoInternalTypesAccumulator(using Context) extends TypeAccumulator[Boolean] { - override def apply(x: Boolean, tp: Type): Boolean = x && containsNoInternalTypes(tp, this) - } + override def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = gadt.nonParamBounds(param) + override def fullLowerBound(param: TypeParamRef)(using Context): Type = gadt.fullLowerBound(param) + override def fullUpperBound(param: TypeParamRef)(using Context): Type = gadt.fullUpperBound(param) // ---- Debug ------------------------------------------------------------ override def constr = gadtsConstr - - override def toText(printer: Printer): Texts.Text = constraint.toText(printer) - - override def debugBoundsDescription(using Context): String = { - val sb = new mutable.StringBuilder - sb ++= constraint.show - sb += '\n' - mapping.foreachBinding { case (sym, _) => - sb ++= i"$sym: ${fullBounds(sym)}\n" - } - sb.result - } } -@sharable object EmptyGadtConstraint extends GadtConstraint { - override def bounds(sym: Symbol)(using Context): TypeBounds | Null = null - override def fullBounds(sym: Symbol)(using Context): TypeBounds | Null = null 
- - override def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = unsupported("EmptyGadtConstraint.isLess") - - override def isNarrowing: Boolean = false - - override def contains(sym: Symbol)(using Context) = false - - override def addToConstraint(params: List[Symbol])(using Context): Boolean = unsupported("EmptyGadtConstraint.addToConstraint") - override def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = unsupported("EmptyGadtConstraint.addBound") - - override def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int)(using Context): Type = unsupported("EmptyGadtConstraint.approximation") - - override def symbols: List[Symbol] = Nil - - override def fresh = new ProperGadtConstraint - override def restore(other: GadtConstraint): Unit = - assert(!other.isNarrowing, "cannot restore a non-empty GADTMap") - - override def debugBoundsDescription(using Context): String = "EmptyGadtConstraint" - - override def toText(printer: Printer): Texts.Text = "EmptyGadtConstraint" -} +// Hide ConstraintHandling within GadtState +private class ProperGadtState(private var myGadt: GadtConstraint) extends ConstraintHandling with GadtState: + def gadt: GadtConstraint = myGadt + def gadt_=(gadt: GadtConstraint): Unit = myGadt = gadt diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index 062ddd5e846c..60ebc95e7bed 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -100,16 +100,16 @@ object MatchTypeTrace: case TryReduce(scrut: Type) => i" trying to reduce $scrut" case NoMatches(scrut, cases) => - i""" failed since selector $scrut + i""" failed since selector $scrut | matches none of the cases | | ${casesText(cases)}""" case EmptyScrutinee(scrut) => - i""" failed since selector $scrut + i""" failed since selector $scrut | is uninhabited (there are no values of that type).""" case 
Stuck(scrut, stuckCase, otherCases) => val msg = - i""" failed since selector $scrut + i""" failed since selector $scrut | does not match ${caseText(stuckCase)} | and cannot be shown to be disjoint from it either.""" if otherCases.length == 0 then msg @@ -121,14 +121,14 @@ object MatchTypeTrace: | ${casesText(otherCases)}""" case NoInstance(scrut, stuckCase, fails) => def params = if fails.length == 1 then "parameter" else "parameters" - i""" failed since selector $scrut + i""" failed since selector $scrut | does not uniquely determine $params ${fails.map(_._1)}%, % in | ${caseText(stuckCase)} | The computed bounds for the $params are: | ${fails.map((name, bounds) => i"$name$bounds")}%\n %""" def noMatchesText(scrut: Type, cases: List[Type])(using Context): String = - i"""failed since selector $scrut + i"""failed since selector $scrut |matches none of the cases | | ${casesText(cases)}""" diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index d141cf7032ee..40a45b9f4678 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -70,14 +70,26 @@ object Mode { /** We are currently unpickling Scala2 info */ val Scala2Unpickling: Mode = newMode(13, "Scala2Unpickling") - /** We are currently checking bounds to be non-empty, so we should not - * do any widening when computing members of refined types. + /** Signifies one of two possible situations: + * 1. We are currently checking bounds to be non-empty, so we should not + * do any widening when computing members of refined types. + * 2. We are currently checking self type conformance, so we should not + * ignore capture sets added to otherwise pure classes (only needed + * for capture checking). 
*/ - val CheckBounds: Mode = newMode(14, "CheckBounds") + val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") /** Use Scala2 scheme for overloading and implicit resolution */ val OldOverloadingResolution: Mode = newMode(15, "OldOverloadingResolution") + /** Treat CapturingTypes as plain AnnotatedTypes even in phase CheckCaptures. + * Reuses the value of OldOverloadingResolution to save Mode bits. + * This is OK since OldOverloadingResolution only affects implicit search, which + * is done during phases Typer and Inlinig, and IgnoreCaptures only has an + * effect during phase CheckCaptures. + */ + val IgnoreCaptures = OldOverloadingResolution + /** Allow hk applications of type lambdas to wildcard arguments; * used for checking that such applications do not normally arise */ diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index f71c16e82b70..2c968ab9446c 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -300,6 +300,7 @@ object NameKinds { val UniqueInlineName: UniqueNameKind = new UniqueNameKind("$i") val InlineScrutineeName: UniqueNameKind = new UniqueNameKind("$scrutinee") val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") + val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") /** A kind of unique extension methods; Unlike other unique names, these can be * unmangled. 
@@ -324,6 +325,8 @@ object NameKinds { val LocalOptInlineLocalObj: UniqueNameKind = new UniqueNameKind("ilo") + val BoundaryName: UniqueNameKind = new UniqueNameKind("boundary") + /** The kind of names of default argument getters */ val DefaultGetterName: NumberedNameKind = new NumberedNameKind(DEFAULTGETTER, "DefaultGetter") { def mkString(underlying: TermName, info: ThisInfo) = { diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index 47636e49e4fa..04440c9e9b39 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -86,11 +86,17 @@ object NameOps { def isVarPattern: Boolean = testSimple { n => n.length > 0 && { + def isLowerLetterSupplementary: Boolean = + import Character.{isHighSurrogate, isLowSurrogate, isLetter, isLowerCase, isValidCodePoint, toCodePoint} + isHighSurrogate(n(0)) && n.length > 1 && isLowSurrogate(n(1)) && { + val codepoint = toCodePoint(n(0), n(1)) + isValidCodePoint(codepoint) && isLetter(codepoint) && isLowerCase(codepoint) + } val first = n.head - (((first.isLower && first.isLetter) || first == '_') - && (n != false_) - && (n != true_) - && (n != null_)) + ((first.isLower && first.isLetter || first == '_' || isLowerLetterSupplementary) + && n != false_ + && n != true_ + && n != null_) } } || name.is(PatMatGivenVarName) @@ -98,7 +104,7 @@ object NameOps { case raw.NE | raw.LE | raw.GE | EMPTY => false case name: SimpleName => - name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.head) + name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.firstCodePoint) case _ => false } @@ -208,22 +214,22 @@ object NameOps { if str == mustHave then found = true idx + str.length else idx - val start = if ctx.settings.Ycc.value then skip(0, "Impure") else 0 - skip(skip(start, "Erased"), "Context") == suffixStart + skip(skip(0, "Impure"), "Context") == suffixStart && found } /** 
Same as `funArity`, except that it returns -1 if the prefix * is not one of a (possibly empty) concatenation of a subset of - * "Impure" (only under -Ycc), "Erased" and "Context" (in that order). + * "Impure" (only under pureFunctions), "Erased" and "Context" (in that order). */ private def checkedFunArity(suffixStart: Int)(using Context): Int = if isFunctionPrefix(suffixStart) then funArity(suffixStart) else -1 - /** Is a function name, i.e one of FunctionXXL, FunctionN, ContextFunctionN, ErasedFunctionN, ErasedContextFunctionN for N >= 0 + /** Is a function name, i.e one of FunctionXXL, FunctionN, ContextFunctionN, ImpureFunctionN, ImpureContextFunctionN for N >= 0 */ def isFunction(using Context): Boolean = - (name eq tpnme.FunctionXXL) || checkedFunArity(functionSuffixStart) >= 0 + (name eq tpnme.FunctionXXL) + || checkedFunArity(functionSuffixStart) >= 0 /** Is a function name * - FunctionN for N >= 0 @@ -236,14 +242,11 @@ object NameOps { isFunctionPrefix(suffixStart, mustHave) && funArity(suffixStart) >= 0 def isContextFunction(using Context): Boolean = isSpecificFunction("Context") - def isErasedFunction(using Context): Boolean = isSpecificFunction("Erased") def isImpureFunction(using Context): Boolean = isSpecificFunction("Impure") /** Is a synthetic function name, i.e. 
one of * - FunctionN for N > 22 * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N >= 0 - * - ErasedContextFunctionN for N >= 0 */ def isSyntheticFunction(using Context): Boolean = val suffixStart = functionSuffixStart @@ -353,6 +356,14 @@ object NameOps { val unmangled = kinds.foldLeft(name)(_.unmangle(_)) if (unmangled eq name) name else unmangled.unmangle(kinds) } + + def firstCodePoint: Int = + val first = name.firstPart + import Character.{isHighSurrogate, isLowSurrogate, isValidCodePoint, toCodePoint} + if isHighSurrogate(first(0)) && first.length > 1 && isLowSurrogate(first(1)) then + val codepoint = toCodePoint(first(0), first(1)) + if isValidCodePoint(codepoint) then codepoint else first(0) + else first(0) } extension (name: TermName) { diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index fa0a89349b5e..dc09edd79781 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -42,10 +42,10 @@ object NamerOps: case Nil => resultType case TermSymbols(params) :: paramss1 => - val (isContextual, isImplicit, isErased) = - if params.isEmpty then (false, false, false) - else (params.head.is(Given), params.head.is(Implicit), params.head.is(Erased)) - val make = MethodType.companion(isContextual = isContextual, isImplicit = isImplicit, isErased = isErased) + val (isContextual, isImplicit) = + if params.isEmpty then (false, false) + else (params.head.is(Given), params.head.is(Implicit)) + val make = MethodType.companion(isContextual = isContextual, isImplicit = isImplicit) if isJava then for param <- params do if param.info.isDirectRef(defn.ObjectClass) then param.info = defn.AnyType @@ -67,11 +67,11 @@ object NamerOps: completer.withSourceModule(findModuleBuddy(name.sourceModuleName, scope)) /** Find moduleClass/sourceModule in effective scope */ - def findModuleBuddy(name: Name, scope: Scope)(using Context) = { - val it = 
scope.lookupAll(name).filter(_.is(Module)) - if (it.hasNext) it.next() - else NoSymbol.assertingErrorsReported(s"no companion $name in $scope") - } + def findModuleBuddy(name: Name, scope: Scope, alternate: Name = EmptyTermName)(using Context): Symbol = + var it = scope.lookupAll(name).filter(_.is(Module)) + if !alternate.isEmpty then it ++= scope.lookupAll(alternate).filter(_.is(Module)) + if it.hasNext then it.next() + else NoSymbol.assertingErrorsReported(em"no companion $name in $scope") /** If a class has one of these flags, it does not get a constructor companion */ private val NoConstructorProxyNeededFlags = Abstract | Trait | Case | Synthetic | Module | Invisible @@ -212,11 +212,11 @@ object NamerOps: * by (ab?)-using GADT constraints. See pos/i941.scala. */ def linkConstructorParams(sym: Symbol, tparams: List[Symbol], rhsCtx: Context)(using Context): Unit = - rhsCtx.gadt.addToConstraint(tparams) + rhsCtx.gadtState.addToConstraint(tparams) tparams.lazyZip(sym.owner.typeParams).foreach { (psym, tparam) => val tr = tparam.typeRef - rhsCtx.gadt.addBound(psym, tr, isUpper = false) - rhsCtx.gadt.addBound(psym, tr, isUpper = true) + rhsCtx.gadtState.addBound(psym, tr, isUpper = false) + rhsCtx.gadtState.addBound(psym, tr, isUpper = true) } end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index f13c3a184bf9..1e08379b57f0 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -15,8 +15,8 @@ import scala.annotation.internal.sharable object Names { import NameKinds._ - /** Things that can be turned into names with `totermName` and `toTypeName` - * Decorators defines implements these as extension methods for strings. + /** Things that can be turned into names with `toTermName` and `toTypeName`. + * Decorators implements these as extension methods for strings. 
*/ type PreName = Name | String @@ -25,7 +25,7 @@ object Names { */ abstract class Designator - /** A name if either a term name or a type name. Term names can be simple + /** A name is either a term name or a type name. Term names can be simple * or derived. A simple term name is essentially an interned string stored * in a name table. A derived term name adds a tag, and possibly a number * or a further simple name to some other name. diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 1341fac7d735..faea30390d2b 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -16,27 +16,34 @@ import cc.{CapturingType, derivedCapturingType} object OrderingConstraint { - type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] + /** If true, use reverse dependencies in `replace` to avoid checking the bounds + * of all parameters in the constraint. This can speed things up, but there are some + * rare corner cases where reverse dependencies miss a parameter. Specifically, + * if a constraint contains a free reference to TypeParam P and afterwards the + * same P is added as a bound variable to the constraint, a backwards link would + * then become necessary at this point but is missing. This causes two CB projects + * to fail when reverse dependencies are checked (parboiled2 and perspective). + * In these rare cases `replace` could behave differently when optimized. However, + * no deviation was found in the two projects. It is not clear what the "right" + * behavior of `replace` should be in these cases. Normally, PolyTypes added + * to constraints are supposed to be fresh, so that would mean that the behavior + * with optimizeReplace = true would be correct. But the previous behavior without + * reverse dependency checking corresponds to `optimizeReplace = false`. 
This behavior + * makes sense if we assume that the added polytype was simply added too late, so we + * want to establish the link between newly bound variable and pre-existing reference. + */ + private final val optimizeReplace = true + + private type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] /** The type of `OrderingConstraint#boundsMap` */ - type ParamBounds = ArrayValuedMap[Type] + private type ParamBounds = ArrayValuedMap[Type] /** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */ - type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] - - /** A new constraint with given maps and given set of hard typevars */ - private def newConstraint( - boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering, - hardVars: TypeVars)(using Context) : OrderingConstraint = - if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then - empty - else - val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) - if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) - result + private type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] /** A lens for updating a single entry array in one of the three constraint maps */ - abstract class ConstraintLens[T <: AnyRef: ClassTag] { + private abstract class ConstraintLens[T <: AnyRef: ClassTag] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[T] | Null def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[T])(using Context): OrderingConstraint def initial: T @@ -47,7 +54,7 @@ object OrderingConstraint { } /** The `current` constraint but with the entry for `param` updated to `entry`. - * `current` is used linearly. If it is different from `prev` it is + * `current` is used linearly. If it is different from `prev` then `current` is * known to be dead after the call. Hence it is OK to update destructively * parts of `current` which are not shared by `prev`. 
*/ @@ -89,27 +96,27 @@ object OrderingConstraint { map(prev, current, param.binder, param.paramNum, f) } - val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { + private val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[Type] | Null = c.boundsMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[Type])(using Context): OrderingConstraint = - newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap, c.hardVars) + c.newConstraint(boundsMap = c.boundsMap.updated(poly, entries)) def initial = NoType } - val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.lowerMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap, c.hardVars) + c.newConstraint(lowerMap = c.lowerMap.updated(poly, entries)) def initial = Nil } - val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.upperMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries), c.hardVars) + c.newConstraint(upperMap = c.upperMap.updated(poly, entries)) def initial = Nil } @@ -143,11 +150,27 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private val lowerMap : ParamOrdering, private val upperMap : 
ParamOrdering, private val hardVars : TypeVars) extends Constraint { + thisConstraint => import UnificationDirection.* type This = OrderingConstraint + /** A new constraint with given maps and given set of hard typevars */ + private def newConstraint( + boundsMap: ParamBounds = this.boundsMap, + lowerMap: ParamOrdering = this.lowerMap, + upperMap: ParamOrdering = this.upperMap, + hardVars: TypeVars = this.hardVars)(using Context) : OrderingConstraint = + if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then + empty + else + val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) + if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) + result.coDeps = this.coDeps + result.contraDeps = this.contraDeps + result + // ----------- Basic indices -------------------------------------------------- /** The number of type parameters in the given entry array */ @@ -201,6 +224,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def exclusiveUpper(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] = upper(param).filterNot(isLess(butNot, _)) + def bounds(param: TypeParamRef)(using Context): TypeBounds = { + val e = entry(param) + if (e.exists) e.bounds + else { + // TODO: should we change the type of paramInfos to nullable? 
+ val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos + if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala + else TypeBounds.empty + } + } + // ---------- Info related to TypeParamRefs ------------------------------------------- def isLess(param1: TypeParamRef, param2: TypeParamRef): Boolean = @@ -217,6 +251,197 @@ class OrderingConstraint(private val boundsMap: ParamBounds, if tvar == null then NoType else tvar +// ------------- Type parameter dependencies ---------------------------------------- + + private type ReverseDeps = SimpleIdentityMap[TypeParamRef, SimpleIdentitySet[TypeParamRef]] + + /** A map that associates type parameters of this constraint with all other type + * parameters that refer to them in their bounds covariantly, such that, if the + * type parameter is instantiated to a larger type, the constraint would be narrowed + * (i.e. solution set changes other than simply being made larger). + */ + private var coDeps: ReverseDeps = SimpleIdentityMap.empty + + /** A map that associates type parameters of this constraint with all other type + * parameters that refer to them in their bounds covariantly, such that, if the + * type parameter is instantiated to a smaller type, the constraint would be narrowed. + * (i.e. solution set changes other than simply being made larger). 
+ */ + private var contraDeps: ReverseDeps = SimpleIdentityMap.empty + + /** Null-safe indexing */ + extension (deps: ReverseDeps) def at(param: TypeParamRef): SimpleIdentitySet[TypeParamRef] = + val result = deps(param) + if null == result // swapped operand order important since `==` is overloaded in `SimpleIdentitySet` + then SimpleIdentitySet.empty + else result + + override def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean = + def origin(tv: TypeVar) = + assert(!instType(tv).exists) + tv.origin + val param = origin(tv) + val excluded = except.map(origin) + val qualifies: TypeParamRef => Boolean = !excluded.contains(_) + def test(deps: ReverseDeps, lens: ConstraintLens[List[TypeParamRef]]) = + deps.at(param).exists(qualifies) + || lens(this, tv.origin.binder, tv.origin.paramNum).exists(qualifies) + if co then test(coDeps, upperLens) else test(contraDeps, lowerLens) + + /** Modify traversals in two respects: + * - when encountering an application C[Ts], where C is a type variable or parameter + * that has an instantiation in this constraint, assume the type parameters of + * the instantiation instead of the type parameters of C when traversing the + * arguments Ts. That can make a difference for the variance in which an argument + * is traversed. Example constraint: + * + * constrained types: C[X], A + * A >: C[B] + * C := Option + * + * Here, B is traversed with variance +1 instead of 0. Test case: pos/t3152.scala + * + * - When typing a prefx, don't avoid negative variances. This matters only for the + * corner case where a parameter is instantiated to Nothing (see comment in + * TypeAccumulator#applyToPrefix). When determining instantiation directions in + * interpolations (which is what dependency variances are for), it can be ignored. + */ + private trait ConstraintAwareTraversal[T] extends TypeAccumulator[T]: + + /** Does `param` have bounds in the current constraint? 
*/ + protected def hasBounds(param: TypeParamRef): Boolean = entry(param).isInstanceOf[TypeBounds] + + override def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = + def tparams(tycon: Type): List[ParamInfo] = tycon match + case tycon: TypeVar if !tycon.inst.exists => tparams(tycon.origin) + case tycon: TypeParamRef if !hasBounds(tycon) => + val entryParams = entry(tycon).typeParams + if entryParams.nonEmpty then entryParams + else tp.tyconTypeParams + case _ => tp.tyconTypeParams + tparams(tp.tycon) + + override def applyToPrefix(x: T, tp: NamedType): T = + this(x, tp.prefix) + end ConstraintAwareTraversal + + /** A type traverser that adjust dependencies originating from a given type + * @param ignoreBinding if not null, a parameter that is assumed to be still uninstantiated. + * This is necessary to handle replacements. + */ + private class Adjuster(srcParam: TypeParamRef, ignoreBinding: TypeParamRef | Null)(using Context) + extends TypeTraverser, ConstraintAwareTraversal[Unit]: + + var add: Boolean = compiletime.uninitialized + val seen = util.HashSet[LazyRef]() + + override protected def hasBounds(param: TypeParamRef) = + (param eq ignoreBinding) || super.hasBounds(param) + + def update(deps: ReverseDeps, referenced: TypeParamRef): ReverseDeps = + val prev = deps.at(referenced) + val newSet = if add then prev + srcParam else prev - srcParam + if newSet.isEmpty then deps.remove(referenced) + else deps.updated(referenced, newSet) + + def traverse(t: Type) = t match + case param: TypeParamRef => + if hasBounds(param) then + if variance >= 0 then coDeps = update(coDeps, param) + if variance <= 0 then contraDeps = update(contraDeps, param) + else + traverse(entry(param)) + case tp: LazyRef => + if !seen.contains(tp) then + seen += tp + traverse(tp.ref) + case _ => traverseChildren(t) + end Adjuster + + /** Adjust dependencies to account for the delta of previous entry `prevEntry` + * and the new bound `entry` for the type parameter `srcParam`. 
+ */
+ def adjustDeps(entry: Type | Null, prevEntry: Type | Null, srcParam: TypeParamRef, ignoreBinding: TypeParamRef | Null = null)(using Context): this.type =
+ val adjuster = new Adjuster(srcParam, ignoreBinding)
+
+ /** Adjust reverse dependencies of all type parameters referenced by `bound`
+ * @param isLower `bound` is a lower bound
+ * @param add if true, add referenced variables to dependencies, otherwise drop them.
+ */
+ def adjustReferenced(bound: Type, isLower: Boolean, add: Boolean) =
+ adjuster.variance = if isLower then 1 else -1
+ adjuster.add = add
+ adjuster.seen.clear(resetToInitial = false)
+ adjuster.traverse(bound)
+
+ /** Use an optimized strategy to adjust dependencies to account for the delta
+ * of previous bound `prevBound` and new bound `bound`: If `prevBound` is some
+ * and/or prefix of `bound`, and `baseCase` is true, just add the new parts of `bound`.
+ * @param isLower `bound` and `prevBound` are lower bounds
+ * @return true iff the delta strategy succeeded, false if it failed in which case
+ * the constraint is left unchanged.
+ */
+ def adjustDelta(bound: Type, prevBound: Type, isLower: Boolean, baseCase: => Boolean): Boolean =
+ if bound eq prevBound then
+ baseCase
+ else bound match
+ case bound: AndOrType =>
+ adjustDelta(bound.tp1, prevBound, isLower, baseCase) && {
+ adjustReferenced(bound.tp2, isLower, add = true)
+ true
+ }
+ case _ => false
+
+ /** Add or remove dependencies referenced in `bounds`.
+ * @param add if true, dependencies are added, otherwise they are removed
+ */
+ def adjustBounds(bounds: TypeBounds, add: Boolean) =
+ adjustReferenced(bounds.lo, isLower = true, add)
+ adjustReferenced(bounds.hi, isLower = false, add)
+
+ entry match
+ case entry @ TypeBounds(lo, hi) =>
+ prevEntry match
+ case prevEntry @ TypeBounds(plo, phi) =>
+ if !adjustDelta(lo, plo, isLower = true,
+ adjustDelta(hi, phi, isLower = false, true))
+ then
+ adjustBounds(prevEntry, add = false)
+ adjustBounds(entry, add = true)
+ case _ =>
+ adjustBounds(entry, add = true)
+ case _ =>
+ prevEntry match
+ case prevEntry: TypeBounds =>
+ adjustBounds(prevEntry, add = false)
+ case _ =>
+ dropDeps(srcParam) // srcParam is instantiated, so its dependencies can be dropped
+ this
+ end adjustDeps
+
+ /** Adjust dependencies to account for adding or dropping all `entries` associated
+ * with `poly`.
+ * @param add if true, entries is added, otherwise it is dropped
+ */
+ def adjustDeps(poly: TypeLambda, entries: Array[Type], add: Boolean)(using Context): this.type =
+ for n <- 0 until paramCount(entries) do
+ if add
+ then adjustDeps(entries(n), NoType, poly.paramRefs(n))
+ else adjustDeps(NoType, entries(n), poly.paramRefs(n))
+ this
+
+ /** Remove all reverse dependencies of `param` */
+ def dropDeps(param: TypeParamRef)(using Context): Unit =
+ coDeps = coDeps.remove(param)
+ contraDeps = contraDeps.remove(param)
+
+ /** A string representing the two dependency maps */
+ def depsToString(using Context): String =
+ def depsStr(deps: ReverseDeps): String =
+ def depStr(param: TypeParamRef) = i"$param --> ${deps.at(param).toList}%, %"
+ if deps.isEmpty then "" else i"\n ${deps.toList.map((k, v) => depStr(k))}%\n %"
+ i" co-deps:${depsStr(coDeps)}\n contra-deps:${depsStr(contraDeps)}\n"
+
 // ---------- Adding TypeLambdas --------------------------------------------------

 /** The bound type `tp` without constrained parameters which are clearly
@@ -282,7 +507,8 @@ class
OrderingConstraint(private val boundsMap: ParamBounds, val entries1 = new Array[Type](nparams * 2) poly.paramInfos.copyToArray(entries1, 0) tvars.copyToArray(entries1, nparams) - newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap, hardVars).init(poly) + newConstraint(boundsMap = this.boundsMap.updated(poly, entries1)) + .init(poly) } /** Split dependent parameters off the bounds for parameters in `poly`. @@ -298,31 +524,23 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val param = poly.paramRefs(i) val bounds = dropWildcards(nonParamBounds(param)) val stripped = stripParams(bounds, todos, isUpper = true) - current = updateEntry(current, param, stripped) + current = boundsLens.update(this, current, param, stripped) while todos.nonEmpty do current = todos.head(current, param) todos.dropInPlace(1) i += 1 } - current.checkNonCyclic() + current.adjustDeps(poly, current.boundsMap(poly).nn, add = true) + .checkWellFormed() } // ---------- Updates ------------------------------------------------------------ - /** If `inst` is a TypeBounds, make sure it does not contain toplevel - * references to `param` (see `Constraint#occursAtToplevel` for a definition - * of "toplevel"). - * Any such references are replaced by `Nothing` in the lower bound and `Any` - * in the upper bound. - * References can be direct or indirect through instantiations of other - * parameters in the constraint. 
- */ - private def ensureNonCyclic(param: TypeParamRef, inst: Type)(using Context): Type = - - def recur(tp: Type, fromBelow: Boolean): Type = tp match + def validBoundFor(param: TypeParamRef, bound: Type, isUpper: Boolean)(using Context): Type = + def recur(tp: Type): Type = tp match case tp: AndOrType => - val r1 = recur(tp.tp1, fromBelow) - val r2 = recur(tp.tp2, fromBelow) + val r1 = recur(tp.tp1) + val r2 = recur(tp.tp2) if (r1 eq tp.tp1) && (r2 eq tp.tp2) then tp else tp.match case tp: OrType => @@ -331,35 +549,34 @@ class OrderingConstraint(private val boundsMap: ParamBounds, r1 & r2 case tp: TypeParamRef => if tp eq param then - if fromBelow then defn.NothingType else defn.AnyType + if isUpper then defn.AnyType else defn.NothingType else entry(tp) match case NoType => tp - case TypeBounds(lo, hi) => if lo eq hi then recur(lo, fromBelow) else tp - case inst => recur(inst, fromBelow) + case TypeBounds(lo, hi) => if lo eq hi then recur(lo) else tp + case inst => recur(inst) case tp: TypeVar => - val underlying1 = recur(tp.underlying, fromBelow) + val underlying1 = recur(tp.underlying) if underlying1 ne tp.underlying then underlying1 else tp case CapturingType(parent, refs) => - val parent1 = recur(parent, fromBelow) + val parent1 = recur(parent) if parent1 ne parent then tp.derivedCapturingType(parent1, refs) else tp case tp: AnnotatedType => - val parent1 = recur(tp.parent, fromBelow) + val parent1 = recur(tp.parent) if parent1 ne tp.parent then tp.derivedAnnotatedType(parent1, tp.annot) else tp case _ => val tp1 = tp.dealiasKeepAnnots if tp1 ne tp then - val tp2 = recur(tp1, fromBelow) + val tp2 = recur(tp1) if tp2 ne tp1 then tp2 else tp else tp - inst match - case bounds: TypeBounds => - bounds.derivedTypeBounds( - recur(bounds.lo, fromBelow = true), - recur(bounds.hi, fromBelow = false)) - case _ => - inst - end ensureNonCyclic + recur(bound) + end validBoundFor + + def validBoundsFor(param: TypeParamRef, bounds: TypeBounds)(using Context): Type = + 
bounds.derivedTypeBounds( + validBoundFor(param, bounds.lo, isUpper = false), + validBoundFor(param, bounds.hi, isUpper = true)) /** Add the fact `param1 <: param2` to the constraint `current` and propagate * `<:<` relationships between parameters ("edges") but not bounds. @@ -418,7 +635,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case param: TypeParamRef if contains(param) => param :: (if (isUpper) upper(param) else lower(param)) case tp: AndType if isUpper => - dependentParams(tp.tp1, isUpper) | (dependentParams(tp.tp2, isUpper)) + dependentParams(tp.tp1, isUpper).setUnion(dependentParams(tp.tp2, isUpper)) case tp: OrType if !isUpper => dependentParams(tp.tp1, isUpper).intersect(dependentParams(tp.tp2, isUpper)) case EtaExpansion(tycon) => @@ -426,10 +643,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case _ => Nil - private def updateEntry(current: This, param: TypeParamRef, tp: Type)(using Context): This = { - if Config.checkNoWildcardsInConstraint then assert(!tp.containsWildcardTypes) - var current1 = boundsLens.update(this, current, param, tp) - tp match { + private def updateEntry(current: This, param: TypeParamRef, newEntry: Type)(using Context): This = { + if Config.checkNoWildcardsInConstraint then assert(!newEntry.containsWildcardTypes) + val oldEntry = current.entry(param) + var current1 = boundsLens.update(this, current, param, newEntry) + .adjustDeps(newEntry, oldEntry, param) + newEntry match { case TypeBounds(lo, hi) => for p <- dependentParams(lo, isUpper = false) do current1 = order(current1, p, param) @@ -440,12 +659,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current1 } - /** The public version of `updateEntry`. 
Guarantees that there are no cycles */ def updateEntry(param: TypeParamRef, tp: Type)(using Context): This = - updateEntry(this, param, ensureNonCyclic(param, tp)).checkNonCyclic() + updateEntry(this, param, tp).checkWellFormed() def addLess(param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection)(using Context): This = - order(this, param1, param2, direction).checkNonCyclic() + order(this, param1, param2, direction).checkWellFormed() // ---------- Replacements and Removals ------------------------------------- @@ -455,24 +673,80 @@ class OrderingConstraint(private val boundsMap: ParamBounds, */ def replace(param: TypeParamRef, tp: Type)(using Context): OrderingConstraint = val replacement = tp.dealiasKeepAnnots.stripTypeVar - if param == replacement then this.checkNonCyclic() + if param == replacement then this.checkWellFormed() else assert(replacement.isValueTypeOrLambda) - var current = - if isRemovable(param.binder) then remove(param.binder) - else updateEntry(this, param, replacement) - - def removeParam(ps: List[TypeParamRef]) = ps.filterConserve(param ne _) - def replaceParam(tp: Type, atPoly: TypeLambda, atIdx: Int): Type = - current.ensureNonCyclic(atPoly.paramRefs(atIdx), tp.substParam(param, replacement)) - - current.foreachParam { (p, i) => - current = boundsLens.map(this, current, p, i, replaceParam(_, p, i)) - current = lowerLens.map(this, current, p, i, removeParam) - current = upperLens.map(this, current, p, i, removeParam) - } - current.checkNonCyclic() + val replacedTypeVar = typeVarOfParam(param) + //println(i"replace $param with $replacement in $this") + + def mapReplacedTypeVarTo(to: Type) = new TypeMap: + override def apply(t: Type): Type = + if (t eq replacedTypeVar) && t.exists then to else mapOver(t) + + val coDepsOfParam = coDeps.at(param) + val contraDepsOfParam = contraDeps.at(param) + + var current = updateEntry(this, param, replacement) + // Need to update param early to avoid infinite recursion on instantiation. 
+ // See i16311.scala for a test case. On the other hand, for the purposes of + // dependency adjustment, we need to pretend that `param` is still unbound. + // We achieve that by passing a `ignoreBinding = param` to `adjustDeps` below. + + def removeParamFrom(ps: List[TypeParamRef]) = + ps.filterConserve(param ne _) + + for lo <- lower(param) do + current = upperLens.map(this, current, lo, removeParamFrom) + for hi <- upper(param) do + current = lowerLens.map(this, current, hi, removeParamFrom) + + def replaceParamIn(other: TypeParamRef) = + val oldEntry = current.entry(other) + val newEntry = oldEntry.substParam(param, replacement) match + case tp: TypeBounds => current.validBoundsFor(other, tp) + case tp => tp + current = boundsLens.update(this, current, other, newEntry) + var oldDepEntry = oldEntry + var newDepEntry = newEntry + replacedTypeVar match + case tvar: TypeVar => + if tvar.inst.exists // `isInstantiated` would use ctx.typerState.constraint rather than the current constraint + then + // If the type variable has been instantiated, we need to forget about + // the instantiation for old dependencies. + // I.e. to find out what the old entry was, we should not follow + // the newly instantiated type variable but assume the type variable's origin `param`. + // An example where this happens is if `replace` is called from TypeVar's `instantiateWith`. + oldDepEntry = mapReplacedTypeVarTo(param)(oldDepEntry) + else + // If the type variable has not been instantiated, we need to replace references to it + // in the new entry by `replacement`. Otherwise we would get stuck in an uninstantiated + // type variable. + // An example where this happens is if `replace` is called from unify. 
+ newDepEntry = mapReplacedTypeVarTo(replacement)(newDepEntry) + case _ => + if oldDepEntry ne newDepEntry then + current.adjustDeps(newDepEntry, oldDepEntry, other, ignoreBinding = param) + end replaceParamIn + + if optimizeReplace then + current.foreachParam { (p, i) => + val other = p.paramRefs(i) + entry(other) match + case _: TypeBounds => + if coDepsOfParam.contains(other) || contraDepsOfParam.contains(other) then + replaceParamIn(other) + case _ => replaceParamIn(other) + } + else + current.foreachParam { (p, i) => + val other = p.paramRefs(i) + if other != param then replaceParamIn(other) + } + if isRemovable(param.binder) then current = current.remove(param.binder) + current.dropDeps(param) + current.checkWellFormed() end replace def remove(pt: TypeLambda)(using Context): This = { @@ -485,7 +759,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } val hardVars1 = pt.paramRefs.foldLeft(hardVars)((hvs, param) => hvs - typeVarOfParam(param)) newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap), hardVars1) - .checkNonCyclic() + .adjustDeps(pt, boundsMap(pt).nn, add = false) + .checkWellFormed() } def isRemovable(pt: TypeLambda): Boolean = { @@ -511,7 +786,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def swapKey[T](m: ArrayValuedMap[T]) = val info = m(from) if info == null then m else m.remove(from).updated(to, info) - var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap), hardVars) + var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap)) def subst[T <: Type](x: T): T = x.subst(from, to).asInstanceOf[T] current.foreachParam {(p, i) => current = boundsLens.map(this, current, p, i, subst) @@ -519,12 +794,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current = upperLens.map(this, current, p, i, _.map(subst)) } constr.println(i"renamed $this to $current") - current.checkNonCyclic() + 
current.checkWellFormed() def isHard(tv: TypeVar) = hardVars.contains(tv) def withHard(tv: TypeVar)(using Context) = - newConstraint(boundsMap, lowerMap, upperMap, hardVars + tv) + newConstraint(hardVars = this.hardVars + tv) def instType(tvar: TypeVar): Type = entry(tvar.origin) match case _: TypeBounds => NoType @@ -551,6 +826,26 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(tvar.origin == param, i"mismatch $tvar, $param") case _ => + def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = + def occurs(tp: Type)(using Context): Boolean = tp match + case tp: AndOrType => + occurs(tp.tp1) || occurs(tp.tp2) + case tp: TypeParamRef => + (tp eq param) || entry(tp).match + case NoType => false + case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) + case inst => occurs(inst) + case tp: TypeVar => + occurs(tp.underlying) + case TypeBounds(lo, hi) => + occurs(lo) || occurs(hi) + case _ => + val tp1 = tp.dealias + (tp1 ne tp) && occurs(tp1) + + occurs(inst) + end occursAtToplevel + // ---------- Exploration -------------------------------------------------------- def domainLambdas: List[TypeLambda] = boundsMap.keys @@ -603,7 +898,57 @@ class OrderingConstraint(private val boundsMap: ParamBounds, // ---------- Checking ----------------------------------------------- - def checkNonCyclic()(using Context): this.type = + def checkWellFormed()(using Context): this.type = + + /** Check that each dependency A -> B in coDeps and contraDeps corresponds to + * a reference to A at the right variance in the entry of B. 
+ */ + def checkBackward(deps: ReverseDeps, depsName: String, v: Int)(using Context): Unit = + deps.foreachBinding { (param, params) => + for srcParam <- params do + assert(contains(srcParam) && occursAtVariance(param, v, in = entry(srcParam)), + i"wrong $depsName backwards reference $param -> $srcParam in $thisConstraint") + } + + /** A type traverser that checks that all references bound in the constraint + * are accounted for in coDeps and/or contraDeps. + */ + def checkForward(srcParam: TypeParamRef)(using Context) = + new TypeTraverser with ConstraintAwareTraversal[Unit]: + val seen = util.HashSet[LazyRef]() + def traverse(t: Type): Unit = t match + case param: TypeParamRef if param ne srcParam => + def check(deps: ReverseDeps, directDeps: List[TypeParamRef], depsName: String) = + assert(deps.at(param).contains(srcParam) || directDeps.contains(srcParam), + i"missing $depsName backwards reference $param -> $srcParam in $thisConstraint") + entry(param) match + case _: TypeBounds => + if variance >= 0 then check(contraDeps, upper(param), "contra") + if variance <= 0 then check(coDeps, lower(param), "co") + case tp => + traverse(tp) + case tp: LazyRef => + if !seen.contains(tp) then + seen += tp + traverse(tp.ref) + case _ => traverseChildren(t) + + /** Does `param` occur at variance `v` or else at variance 0 in entry `in`? 
*/ + def occursAtVariance(param: TypeParamRef, v: Int, in: Type)(using Context): Boolean = + val test = new TypeAccumulator[Boolean] with ConstraintAwareTraversal[Boolean]: + def apply(x: Boolean, t: Type): Boolean = + if x then true + else t match + case t: TypeParamRef => + entry(t) match + case _: TypeBounds => + t == param && (variance == 0 || variance == v) + case e => + apply(x, e) + case _ => + foldOver(x, t) + test(false, in) + if Config.checkConstraintsNonCyclic then domainParams.foreach { param => val inst = entry(param) @@ -612,28 +957,13 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(!occursAtToplevel(param, inst), s"cyclic bound for $param: ${inst.show} in ${this.show}") } - this - - def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = - - def occurs(tp: Type)(using Context): Boolean = tp match - case tp: AndOrType => - occurs(tp.tp1) || occurs(tp.tp2) - case tp: TypeParamRef => - (tp eq param) || entry(tp).match - case NoType => false - case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) - case inst => occurs(inst) - case tp: TypeVar => - occurs(tp.underlying) - case TypeBounds(lo, hi) => - occurs(lo) || occurs(hi) - case _ => - val tp1 = tp.dealias - (tp1 ne tp) && occurs(tp1) + if Config.checkConstraintDeps || ctx.settings.YcheckConstraintDeps.value then + checkBackward(coDeps, "co", -1) + checkBackward(contraDeps, "contra", +1) + domainParams.foreach(p => if contains(p) then checkForward(p).traverse(entry(p))) - occurs(inst) - end occursAtToplevel + this + end checkWellFormed override def checkClosed()(using Context): Unit = @@ -663,13 +993,16 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val constrainedText = " constrained types = " + domainLambdas.mkString("\n") val boundsText = - " bounds = " + { + "\n bounds = " + { val assocs = for (param <- domainParams) yield s"${param.binder.paramNames(param.paramNum)}: ${entryText(entry(param))}" assocs.mkString("\n") } - constrainedText 
+ "\n" + boundsText + val depsText = + "\n coDeps = " + coDeps + + "\n contraDeps = " + contraDeps + constrainedText + boundsText + depsText } } diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index c5f126580df5..5e8a960608e6 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -261,30 +261,30 @@ trait PatternTypeConstrainer { self: TypeComparer => val assumeInvariantRefinement = migrateTo3 || forceInvariantRefinement || refinementIsInvariant(patternTp) - trace(i"constraining simple pattern type $tp >:< $pt", gadts, res => s"$res\ngadt = ${ctx.gadt.debugBoundsDescription}") { + trace(i"constraining simple pattern type $tp >:< $pt", gadts, (res: Boolean) => i"$res gadt = ${ctx.gadt}") { (tp, pt) match { case (AppliedType(tyconS, argsS), AppliedType(tyconP, argsP)) => val saved = state.nn.constraint - val savedGadt = ctx.gadt.fresh val result = - tyconS.typeParams.lazyZip(argsS).lazyZip(argsP).forall { (param, argS, argP) => - val variance = param.paramVarianceSign - if variance == 0 || assumeInvariantRefinement || - // As a special case, when pattern and scrutinee types have the same type constructor, - // we infer better bounds for pattern-bound abstract types. 
- argP.typeSymbol.isPatternBound && patternTp.classSymbol == scrutineeTp.classSymbol - then - val TypeBounds(loS, hiS) = argS.bounds - val TypeBounds(loP, hiP) = argP.bounds - var res = true - if variance < 1 then res &&= isSubType(loS, hiP) - if variance > -1 then res &&= isSubType(loP, hiS) - res - else true + ctx.gadtState.rollbackGadtUnless { + tyconS.typeParams.lazyZip(argsS).lazyZip(argsP).forall { (param, argS, argP) => + val variance = param.paramVarianceSign + if variance == 0 || assumeInvariantRefinement || + // As a special case, when pattern and scrutinee types have the same type constructor, + // we infer better bounds for pattern-bound abstract types. + argP.typeSymbol.isPatternBound && patternTp.classSymbol == scrutineeTp.classSymbol + then + val TypeBounds(loS, hiS) = argS.bounds + val TypeBounds(loP, hiP) = argP.bounds + var res = true + if variance < 1 then res &&= isSubType(loS, hiP) + if variance > -1 then res &&= isSubType(loP, hiS) + res + else true + } } if !result then constraint = saved - ctx.gadt.restore(savedGadt) result case _ => // Give up if we don't get AppliedType, e.g. if we upcasted to Any. diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala index 44d83dcb5278..ee877fb538d4 100644 --- a/compiler/src/dotty/tools/dotc/core/Periods.scala +++ b/compiler/src/dotty/tools/dotc/core/Periods.scala @@ -20,7 +20,7 @@ object Periods { /** Are all base types in the current period guaranteed to be the same as in period `p`? 
*/ def currentHasSameBaseTypesAs(p: Period)(using Context): Boolean = val period = ctx.period - period == p || + period.code == p.code || period.runId == p.runId && unfusedPhases(period.phaseId).sameBaseTypesStartId == unfusedPhases(p.phaseId).sameBaseTypesStartId @@ -118,7 +118,8 @@ object Periods { apply(rid, 0, PhaseMask) } - final val Nowhere: Period = new Period(0) + inline val NowhereCode = 0 + final val Nowhere: Period = new Period(NowhereCode) final val InitialPeriod: Period = Period(InitialRunId, FirstPhaseId) diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index b4a2dcac1b85..00e017430a5f 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -197,6 +197,14 @@ object Phases { config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.toList}") } + /** Unlink `phase` from Denot transformer chain. This means that + * any denotation transformer defined by the phase will not be executed. 
+ */ + def unlinkPhaseAsDenotTransformer(phase: Phase)(using Context) = + for i <- 0 until nextDenotTransformerId.length do + if nextDenotTransformerId(i) == phase.id then + nextDenotTransformerId(i) = nextDenotTransformerId(phase.id + 1) + private var myParserPhase: Phase = _ private var myTyperPhase: Phase = _ private var myPostTyperPhase: Phase = _ @@ -314,8 +322,8 @@ object Phases { units.map { unit => val unitCtx = ctx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports try run(using unitCtx) - catch case ex: Throwable => - println(s"$ex while running $phaseName on $unit") + catch case ex: Throwable if !ctx.run.enrichedErrorMessage => + println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) throw ex unitCtx.compilationUnit } diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index 863ae4fa6b7f..99076b422358 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -467,7 +467,7 @@ object Scopes { override def size: Int = 0 override def nestingLevel: Int = 0 override def toList(using Context): List[Symbol] = Nil - override def cloneScope(using Context): MutableScope = unsupported("cloneScope") + override def cloneScope(using Context): MutableScope = newScope(nestingLevel) override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = null override def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry | Null = null } diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index bff957721b23..27e97a92b48e 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -3,6 +3,7 @@ package core import scala.collection.mutable import scala.annotation.switch +import scala.annotation.internal.sharable import Names._ import Symbols._ import Contexts._ @@ -40,7 
+41,9 @@ object StdNames { inline val Tuple = "Tuple" inline val Product = "Product" - def sanitize(str: String): String = str.replaceAll("""[<>]""", """\$""").nn + @sharable + private val disallowed = java.util.regex.Pattern.compile("""[<>]""").nn + def sanitize(str: String): String = disallowed.matcher(str).nn.replaceAll("""\$""").nn } abstract class DefinedNames[N <: Name] { @@ -128,6 +131,7 @@ object StdNames { val EXCEPTION_RESULT_PREFIX: N = "exceptionResult" val EXPAND_SEPARATOR: N = str.EXPAND_SEPARATOR val IMPORT: N = "" + val INTO: N = "" val MODULE_SUFFIX: N = str.MODULE_SUFFIX val OPS_PACKAGE: N = "" val OVERLOADED: N = "" @@ -208,6 +212,7 @@ object StdNames { final val Throwable: N = "Throwable" final val IOOBException: N = "IndexOutOfBoundsException" final val FunctionXXL: N = "FunctionXXL" + final val ErasedFunction: N = "ErasedFunction" final val Abs: N = "Abs" final val And: N = "&&" @@ -243,7 +248,6 @@ object StdNames { final val ToString: N = "ToString" final val Xor: N = "^" - final val ClassfileAnnotation: N = "ClassfileAnnotation" final val ClassManifest: N = "ClassManifest" final val Enum: N = "Enum" final val Group: N = "Group" @@ -420,11 +424,14 @@ object StdNames { val assert_ : N = "assert" val assume_ : N = "assume" val box: N = "box" + val break: N = "break" val build : N = "build" val bundle: N = "bundle" val bytes: N = "bytes" val canEqual_ : N = "canEqual" val canEqualAny : N = "canEqualAny" + val caps: N = "caps" + val captureChecking: N = "captureChecking" val checkInitialized: N = "checkInitialized" val classOf: N = "classOf" val classType: N = "classType" @@ -445,6 +452,7 @@ object StdNames { val derived: N = "derived" val derives: N = "derives" val doubleHash: N = "doubleHash" + val dotty: N = "dotty" val drop: N = "drop" val dynamics: N = "dynamics" val elem: N = "elem" @@ -498,6 +506,7 @@ object StdNames { val info: N = "info" val inlinedEquals: N = "inlinedEquals" val internal: N = "internal" + val into: N = "into" val 
isArray: N = "isArray" val isDefinedAt: N = "isDefinedAt" val isDefinedAtImpl: N = "$isDefinedAt" @@ -507,10 +516,12 @@ object StdNames { val isInstanceOfPM: N = "$isInstanceOf$" val java: N = "java" val key: N = "key" + val label: N = "label" val lang: N = "lang" val language: N = "language" val length: N = "length" val lengthCompare: N = "lengthCompare" + val local: N = "local" val longHash: N = "longHash" val macroThis : N = "_this" val macroContext : N = "c" @@ -822,7 +833,7 @@ object StdNames { def newBitmapName(bitmapPrefix: TermName, n: Int): TermName = bitmapPrefix ++ n.toString - def selectorName(n: Int): TermName = "_" + (n + 1) + def selectorName(n: Int): TermName = productAccessorName(n + 1) object primitive { val arrayApply: TermName = "[]apply" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index d0bf0f4da6dc..beeaa2ee922e 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -24,7 +24,7 @@ import config.Config import reporting._ import collection.mutable import transform.TypeUtils._ -import cc.{CapturingType, derivedCapturingType} +import cc.{CapturingType, derivedCapturingType, Setup, EventuallyCapturingType, isEventuallyCapturingType} import scala.annotation.internal.sharable @@ -39,7 +39,7 @@ object SymDenotations { final val name: Name, initFlags: FlagSet, initInfo: Type, - initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation(symbol, initInfo) { + initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation(symbol, initInfo, name.isTypeName) { //assert(symbol.id != 4940, name) @@ -168,7 +168,8 @@ object SymDenotations { } } else { - if (myFlags.is(Touched)) throw CyclicReference(this) + if (myFlags.is(Touched)) + throw CyclicReference(this)(using ctx.withOwner(symbol)) myFlags |= Touched atPhase(validFor.firstPhaseId)(completer.complete(this)) } @@ -251,6 +252,18 @@ object 
SymDenotations { final def filterAnnotations(p: Annotation => Boolean)(using Context): Unit = annotations = annotations.filterConserve(p) + def annotationsCarrying(meta: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): List[Annotation] = + annotations.filterConserve(_.hasOneOfMetaAnnotation(meta, orNoneOf = orNoneOf)) + + def keepAnnotationsCarrying(phase: DenotTransformer, meta: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): Unit = + updateAnnotationsAfter(phase, annotationsCarrying(meta, orNoneOf = orNoneOf)) + + def updateAnnotationsAfter(phase: DenotTransformer, annots: List[Annotation])(using Context): Unit = + if annots ne annotations then + val cpy = copySymDenotation() + cpy.annotations = annots + cpy.installAfter(phase) + /** Optionally, the annotation matching the given class symbol */ final def getAnnotation(cls: Symbol)(using Context): Option[Annotation] = dropOtherAnnotations(annotations, cls) match { @@ -273,7 +286,7 @@ object SymDenotations { /** Add the given annotation without parameters to the annotations of this denotation */ final def addAnnotation(cls: ClassSymbol)(using Context): Unit = - addAnnotation(Annotation(cls)) + addAnnotation(Annotation(cls, symbol.span)) /** Remove annotation with given class from this denotation */ final def removeAnnotation(cls: Symbol)(using Context): Unit = @@ -505,6 +518,30 @@ object SymDenotations { /** `fullName` where `.' is the separator character */ def fullName(using Context): Name = fullNameSeparated(QualifiedName) + /** The fully qualified name on the JVM of the class corresponding to this symbol. 
*/ + def binaryClassName(using Context): String = + val builder = new StringBuilder + val pkg = enclosingPackageClass + if !pkg.isEffectiveRoot then + builder.append(pkg.fullName.mangledString) + builder.append(".") + val flatName = this.flatName + // Some companion objects are fake (that is, they're a compiler fiction + // that doesn't correspond to a class that exists at runtime), this + // can happen in two cases: + // - If a Java class has static members. + // - If we create constructor proxies for a class (see NamerOps#addConstructorProxies). + // + // In both cases it's may be vital that we don't return the object name. + // For instance, sending it to zinc: when sbt is restarted, zinc will inspect the binary + // dependencies to see if they're still on the classpath, if it + // doesn't find them it will invalidate whatever referenced them, so + // any reference to a fake companion will lead to extra recompilations. + // Instead, use the class name since it's guaranteed to exist at runtime. + val clsFlatName = if isOneOf(JavaDefined | ConstructorProxy) then flatName.stripModuleClassSuffix else flatName + builder.append(clsFlatName.mangledString) + builder.toString + private var myTargetName: Name | Null = null private def computeTargetName(targetNameAnnot: Option[Annotation])(using Context): Name = @@ -542,9 +579,6 @@ object SymDenotations { // ----- Tests ------------------------------------------------- - /** Is this denotation a type? */ - override def isType: Boolean = name.isTypeName - /** Is this denotation a class? */ final def isClass: Boolean = isInstanceOf[ClassDenotation] @@ -748,7 +782,7 @@ object SymDenotations { * So the first call to a stable member might fail and/or produce side effects. 
*/ final def isStableMember(using Context): Boolean = { - def isUnstableValue = isOneOf(UnstableValueFlags) || info.isInstanceOf[ExprType] + def isUnstableValue = isOneOf(UnstableValueFlags) || info.isInstanceOf[ExprType] || isAllOf(InlineParam) isType || is(StableRealizable) || exists && !isUnstableValue } @@ -808,19 +842,14 @@ object SymDenotations { /** Is this a Scala or Java annotation ? */ def isAnnotation(using Context): Boolean = - isClass && derivesFrom(defn.AnnotationClass) + isClass && (derivesFrom(defn.AnnotationClass) || is(JavaAnnotation)) /** Is this symbol a class that extends `java.io.Serializable` ? */ def isSerializable(using Context): Boolean = isClass && derivesFrom(defn.JavaSerializableClass) - /** Is this symbol a class that extends `AnyVal`? */ - final def isValueClass(using Context): Boolean = - val di = initial - di.isClass - && atPhase(di.validFor.firstPhaseId)(di.derivesFrom(defn.AnyValClass)) - // We call derivesFrom at the initial phase both because AnyVal does not exist - // after Erasure and to avoid cyclic references caused by forcing denotations + /** Is this symbol a class that extends `AnyVal`? Overridden in ClassDenotation */ + def isValueClass(using Context): Boolean = false /** Is this symbol a class of which `null` is a value? 
*/ final def isNullableClass(using Context): Boolean = @@ -960,6 +989,26 @@ object SymDenotations { def isSkolem: Boolean = name == nme.SKOLEM + // Java language spec: https://docs.oracle.com/javase/specs/jls/se11/html/jls-15.html#jls-15.12.3 + // Scala 2 spec: https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#signature-polymorphic-methods + def isSignaturePolymorphic(using Context): Boolean = + containsSignaturePolymorphic + && is(JavaDefined) + && hasAnnotation(defn.NativeAnnot) + && atPhase(typerPhase)(symbol.denot).paramSymss.match + case List(List(p)) => p.info.isRepeatedParam + case _ => false + + def containsSignaturePolymorphic(using Context): Boolean = + maybeOwner == defn.MethodHandleClass + || maybeOwner == defn.VarHandleClass + + def originalSignaturePolymorphic(using Context): Denotation = + if containsSignaturePolymorphic && !isSignaturePolymorphic then + val d = owner.info.member(name) + if d.symbol.isSignaturePolymorphic then d else NoDenotation + else NoDenotation + def isInlineMethod(using Context): Boolean = isAllOf(InlineMethod, butNot = Accessor) @@ -1053,6 +1102,7 @@ object SymDenotations { case tp: Symbol => sourceOfSelf(tp.info) case tp: RefinedType => sourceOfSelf(tp.parent) case tp: AnnotatedType => sourceOfSelf(tp.parent) + case tp: ThisType => tp.cls } sourceOfSelf(selfType) case info: LazyType => @@ -1151,9 +1201,9 @@ object SymDenotations { final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) || isClass && !isOneOf(EffectivelyOpenFlags) - final def isTransparentTrait(using Context): Boolean = - isAllOf(TransparentTrait) - || defn.assumedTransparentTraits.contains(symbol) + final def isTransparentClass(using Context): Boolean = + is(TransparentType) + || defn.isAssumedTransparent(symbol) || isClass && hasAnnotation(defn.TransparentTraitAnnot) /** The class containing this denotation which has the given effective name. 
*/ @@ -1827,19 +1877,21 @@ object SymDenotations { super.info_=(tp) } - /** The symbols of the parent classes. */ - def parentSyms(using Context): List[Symbol] = info match { - case classInfo: ClassInfo => classInfo.declaredParents.map(_.classSymbol) + /** The types of the parent classes. */ + def parentTypes(using Context): List[Type] = info match + case classInfo: ClassInfo => classInfo.declaredParents case _ => Nil - } + + /** The symbols of the parent classes. */ + def parentSyms(using Context): List[Symbol] = + parentTypes.map(_.classSymbol) /** The symbol of the superclass, NoSymbol if no superclass exists */ - def superClass(using Context): Symbol = parentSyms match { - case parent :: _ => - if (parent.is(Trait)) NoSymbol else parent - case _ => - NoSymbol - } + def superClass(using Context): Symbol = parentTypes match + case parentType :: _ => + val parentCls = parentType.classSymbol + if parentCls.is(Trait) then NoSymbol else parentCls + case _ => NoSymbol /** The explicitly given self type (self types of modules are assumed to be * explcitly given here). 
@@ -1901,20 +1953,20 @@ object SymDenotations { def computeBaseData(implicit onBehalf: BaseData, ctx: Context): (List[ClassSymbol], BaseClassSet) = { def emptyParentsExpected = is(Package) || (symbol == defn.AnyClass) || ctx.erasedTypes && (symbol == defn.ObjectClass) - val psyms = parentSyms - if (psyms.isEmpty && !emptyParentsExpected) + val parents = parentTypes + if (parents.isEmpty && !emptyParentsExpected) onBehalf.signalProvisional() val builder = new BaseDataBuilder - def traverse(parents: List[Symbol]): Unit = parents match { + def traverse(parents: List[Type]): Unit = parents match { case p :: parents1 => - p match { + p.classSymbol match { case pcls: ClassSymbol => builder.addAll(pcls.baseClasses) case _ => assert(isRefinementClass || p.isError || ctx.mode.is(Mode.Interactive), s"$this has non-class parent: $p") } traverse(parents1) case nil => } - traverse(psyms) + traverse(parents) (classSymbol :: builder.baseClasses, builder.baseClassSet) } @@ -1951,6 +2003,17 @@ object SymDenotations { /** Hook to do a pre-enter test. Overridden in PackageDenotation */ protected def proceedWithEnter(sym: Symbol, mscope: MutableScope)(using Context): Boolean = true + final override def isValueClass(using Context): Boolean = + val di = initial.asClass + val anyVal = defn.AnyValClass + if di.baseDataCache.isValid && !ctx.erasedTypes then + // fast path that does not demand time travel + (symbol eq anyVal) || di.baseClassSet.contains(anyVal) + else + // We call derivesFrom at the initial phase both because AnyVal does not exist + // after Erasure and to avoid cyclic references caused by forcing denotations + atPhase(di.validFor.firstPhaseId)(di.derivesFrom(anyVal)) + /** Enter a symbol in current scope, and future scopes of same denotation. * Note: We require that this does not happen after the first time * someone does a findMember on a subclass. 
@@ -2092,7 +2155,7 @@ object SymDenotations { Stats.record("basetype cache entries") if (!baseTp.exists) Stats.record("basetype cache NoTypes") } - if (!tp.isProvisional) + if (!tp.isProvisional && !CapturingType.isUncachable(tp)) btrCache(tp) = baseTp else btrCache.remove(tp) // Remove any potential sentinel value @@ -2106,8 +2169,9 @@ object SymDenotations { def recur(tp: Type): Type = try { tp match { case tp: CachedType => - val baseTp = btrCache.lookup(tp) - if (baseTp != null) return ensureAcyclic(baseTp) + val baseTp: Type | Null = btrCache.lookup(tp) + if (baseTp != null) + return ensureAcyclic(baseTp) case _ => } if (Stats.monitored) { @@ -2162,13 +2226,12 @@ object SymDenotations { def computeApplied = { btrCache(tp) = NoPrefix val baseTp = - if (tycon.typeSymbol eq symbol) tp - else (tycon.typeParams: @unchecked) match { + if (tycon.typeSymbol eq symbol) && !tycon.isLambdaSub then tp + else (tycon.typeParams: @unchecked) match case LambdaParam(_, _) :: _ => recur(tp.superType) case tparams: List[Symbol @unchecked] => recur(tycon).substApprox(tparams, args) - } record(tp, baseTp) baseTp } @@ -2251,9 +2314,11 @@ object SymDenotations { var names = Set[Name]() def maybeAdd(name: Name) = if (keepOnly(thisType, name)) names += name try { - for (p <- parentSyms if p.isClass) - for (name <- p.asClass.memberNames(keepOnly)) - maybeAdd(name) + for ptype <- parentTypes do + ptype.classSymbol match + case pcls: ClassSymbol => + for name <- pcls.memberNames(keepOnly) do + maybeAdd(name) val ownSyms = if (keepOnly eq implicitFilter) if (this.is(Package)) Iterator.empty @@ -2438,13 +2503,13 @@ object SymDenotations { val youngest = assocFiles.filter(_.lastModified == lastModDate) val chosen = youngest.head def ambiguousFilesMsg(f: AbstractFile) = - em"""Toplevel definition $name is defined in - | $chosen - |and also in - | $f""" + i"""Toplevel definition $name is defined in + | $chosen + |and also in + | $f""" if youngest.size > 1 then - throw 
TypeError(i"""${ambiguousFilesMsg(youngest.tail.head)} - |One of these files should be removed from the classpath.""") + throw TypeError(em"""${ambiguousFilesMsg(youngest.tail.head)} + |One of these files should be removed from the classpath.""") // Warn if one of the older files comes from a different container. // In that case picking the youngest file is not necessarily what we want, @@ -2454,15 +2519,18 @@ object SymDenotations { try f.container == chosen.container catch case NonFatal(ex) => true if !ambiguityWarningIssued then for conflicting <- assocFiles.find(!sameContainer(_)) do - report.warning(i"""${ambiguousFilesMsg(conflicting.nn)} - |Keeping only the definition in $chosen""") + report.warning(em"""${ambiguousFilesMsg(conflicting.nn)} + |Keeping only the definition in $chosen""") ambiguityWarningIssued = true multi.filterWithPredicate(_.symbol.associatedFile == chosen) end dropStale - if symbol eq defn.ScalaPackageClass then + if name == nme.CONSTRUCTOR then + NoDenotation // packages don't have constructors, even if package objects do. 
+ else if symbol eq defn.ScalaPackageClass then + // revert order: search package first, then nested package objects val denots = super.computeMembersNamed(name) - if denots.exists || name == nme.CONSTRUCTOR then denots + if denots.exists then denots else recur(packageObjs, NoDenotation) else recur(packageObjs, NoDenotation) end computeMembersNamed @@ -2505,7 +2573,6 @@ object SymDenotations { @sharable object NoDenotation extends SymDenotation(NoSymbol, NoSymbol, "".toTermName, Permanent, NoType) { - override def isType: Boolean = false override def isTerm: Boolean = false override def exists: Boolean = false override def owner: Symbol = throw new AssertionError("NoDenotation.owner") @@ -2802,7 +2869,7 @@ object SymDenotations { } def isValidAt(phase: Phase)(using Context) = - checkedPeriod == ctx.period || + checkedPeriod.code == ctx.period.code || createdAt.runId == ctx.runId && createdAt.phaseId < unfusedPhases.length && sameGroup(unfusedPhases(createdAt.phaseId), phase) && diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index c5ae98853061..9eb67b468cfa 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -88,8 +88,8 @@ object SymbolLoaders { return NoSymbol } else - throw new TypeError( - i"""$owner contains object and package with same name: $pname + throw TypeError( + em"""$owner contains object and package with same name: $pname |one of them needs to be removed from classpath""") newModuleSymbol(owner, pname, PackageCreationFlags, PackageCreationFlags, completer).entered @@ -331,8 +331,9 @@ abstract class SymbolLoader extends LazyType { self => if (ctx.debug) ex.printStackTrace() val msg = ex.getMessage() report.error( - if (msg == null) "i/o error while loading " + root.name - else "error while loading " + root.name + ",\n" + msg) + if msg == null then em"i/o error while loading ${root.name}" + else 
em"""error while loading ${root.name}, + |$msg""") } try { val start = System.currentTimeMillis diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 73fbcca6f6ed..aa3ae0c3c513 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -103,7 +103,7 @@ object Symbols { /** The current denotation of this symbol */ final def denot(using Context): SymDenotation = { util.Stats.record("Symbol.denot") - if (checkedPeriod == ctx.period) lastDenot + if checkedPeriod.code == ctx.period.code then lastDenot else computeDenot(lastDenot) } @@ -348,6 +348,27 @@ object Symbols { def paramVariance(using Context): Variance = denot.variance def paramRef(using Context): TypeRef = denot.typeRef + /** Copy a symbol, overriding selective fields. + * Note that `coord` and `associatedFile` will be set from the fields in `owner`, not + * the fields in `sym`. */ + def copy(using Context)( + owner: Symbol = this.owner, + name: ThisName = name, + flags: FlagSet = this.flags, + info: Type = this.info, + privateWithin: Symbol = this.privateWithin, + coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap + associatedFile: AbstractFile | Null = null // Can be `= owner.associatedFile` once we bootstrap + ): Symbol = { + val coord1 = if (coord == NoCoord) owner.coord else coord + val associatedFile1 = if (associatedFile == null) owner.associatedFile else associatedFile + + if isClass then + newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, associatedFile1) + else + newSymbol(owner, name, flags, info, privateWithin, coord1) + } + // -------- Printing -------------------------------------------------------- /** The prefix string to be used when displaying this symbol without denotation */ @@ -469,30 +490,6 @@ object Symbols { NoDenotation // force it in order to set `denot` field of NoSymbol - extension [N <: Name](sym: Symbol { type 
ThisName = N })(using Context) { - /** Copy a symbol, overriding selective fields. - * Note that `coord` and `associatedFile` will be set from the fields in `owner`, not - * the fields in `sym`. - */ - def copy( - owner: Symbol = sym.owner, - name: N = sym.name, - flags: FlagSet = sym.flags, - info: Type = sym.info, - privateWithin: Symbol = sym.privateWithin, - coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap - associatedFile: AbstractFile | Null = null // Can be `= owner.associatedFile` once we bootstrap - ): Symbol = { - val coord1 = if (coord == NoCoord) owner.coord else coord - val associatedFile1 = if (associatedFile == null) owner.associatedFile else associatedFile - - if (sym.isClass) - newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, associatedFile1) - else - newSymbol(owner, name, flags, info, privateWithin, coord1) - } - } - /** Makes all denotation operations available on symbols */ implicit def toDenot(sym: Symbol)(using Context): SymDenotation = sym.denot @@ -633,6 +630,32 @@ object Symbols { owner.thisType, modcls, parents, decls, TermRef(owner.thisType, module)), privateWithin, coord, assocFile) + /** Same as `newCompleteModuleSymbol` except that `parents` can be a list of arbitrary + * types which get normalized into type refs and parameter bindings. 
+ */ + def newNormalizedModuleSymbol( + owner: Symbol, + name: TermName, + modFlags: FlagSet, + clsFlags: FlagSet, + parentTypes: List[Type], + decls: Scope, + privateWithin: Symbol = NoSymbol, + coord: Coord = NoCoord, + assocFile: AbstractFile | Null = null)(using Context): TermSymbol = { + def completer(module: Symbol) = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val cls = denot.asClass.classSymbol + val decls = newScope + denot.info = ClassInfo(owner.thisType, cls, parentTypes.map(_.dealias), decls, TermRef(owner.thisType, module)) + } + } + newModuleSymbol( + owner, name, modFlags, clsFlags, + (module, modcls) => completer(module), + privateWithin, coord, assocFile) + } + /** Create a package symbol with associated package class * from its non-info fields and a lazy type for loading the package's members. */ @@ -663,7 +686,7 @@ object Symbols { addToGadt: Boolean = true, flags: FlagSet = EmptyFlags)(using Context): Symbol = { val sym = newSymbol(ctx.owner, name, Case | flags, info, coord = span) - if (addToGadt && name.isTypeName) ctx.gadt.addToConstraint(sym) + if (addToGadt && name.isTypeName) ctx.gadtState.addToConstraint(sym) sym } diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 58f9732edf1f..2e8aee4df96c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -9,9 +9,11 @@ import SymDenotations.LazyType import Decorators._ import util.Stats._ import Names._ +import StdNames.nme import Flags.{Module, Provisional} import dotty.tools.dotc.config.Config import cc.boxedUnlessFun +import dotty.tools.dotc.transform.TypeUtils.isErasedValueType object TypeApplications { @@ -204,6 +206,12 @@ class TypeApplications(val self: Type) extends AnyVal { } } + /** Substitute in `self` the type parameters of `tycon` by some other types. 
*/ + final def substTypeParams(tycon: Type, to: List[Type])(using Context): Type = + (tycon.typeParams: @unchecked) match + case LambdaParam(lam, _) :: _ => self.substParams(lam, to) + case params: List[Symbol @unchecked] => self.subst(params, to) + /** If `self` is a higher-kinded type, its type parameters, otherwise Nil */ final def hkTypeParams(using Context): List[TypeParamInfo] = if (isLambdaSub) typeParams else Nil @@ -497,6 +505,14 @@ class TypeApplications(val self: Type) extends AnyVal { case AppliedType(tycon, args) => args.boxedUnlessFun(tycon) case _ => Nil + /** If this is an encoding of a function type, return its arguments, otherwise return Nil. + * Handles `ErasedFunction`s and poly functions gracefully. + */ + final def functionArgInfos(using Context): List[Type] = self.dealias match + case RefinedType(parent, nme.apply, mt: MethodType) if defn.isErasedFunctionType(parent) => (mt.paramInfos :+ mt.resultType) + case RefinedType(parent, nme.apply, mt: MethodType) if parent.typeSymbol eq defn.PolyFunctionClass => (mt.paramInfos :+ mt.resultType) + case _ => self.dropDependentRefinement.dealias.argInfos + /** Argument types where existential types in arguments are disallowed */ def argTypes(using Context): List[Type] = argInfos mapConserve noBounds @@ -527,6 +543,9 @@ class TypeApplications(val self: Type) extends AnyVal { case JavaArrayType(elemtp) => elemtp case tp: OrType if tp.tp1.isBottomType => tp.tp2.elemType case tp: OrType if tp.tp2.isBottomType => tp.tp1.elemType - case _ => self.baseType(defn.SeqClass).argInfos.headOption.getOrElse(NoType) + case _ => + self.baseType(defn.SeqClass) + .orElse(self.baseType(defn.ArrayClass)) + .argInfos.headOption.getOrElse(NoType) } } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index adce363dc3f4..465978d329e6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ 
b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -23,7 +23,7 @@ import typer.ProtoTypes.constrained import typer.Applications.productSelectorTypes import reporting.trace import annotation.constructorOnly -import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam} +import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam, isAlwaysPure} /** Provides methods to compare types. */ @@ -60,8 +60,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Indicates whether the subtype check used GADT bounds */ private var GADTused: Boolean = false - protected var canWidenAbstract: Boolean = true - private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance @@ -118,7 +116,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private def isBottom(tp: Type) = tp.widen.isRef(NothingClass) protected def gadtBounds(sym: Symbol)(using Context) = ctx.gadt.bounds(sym) - protected def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = ctx.gadt.addBound(sym, b, isUpper) + protected def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = ctx.gadtState.addBound(sym, b, isUpper) protected def typeVarInstance(tvar: TypeVar)(using Context): Type = tvar.underlying @@ -285,17 +283,28 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val ctx = comparerContext given Context = ctx // optimization for performance val info2 = tp2.info + + /** Does `tp2` have a stable prefix? + * If that's not the case, following an alias via asSeenFrom could be lossy + * so we should not conclude `false` if comparing aliases fails. 
+ * See pos/i17064.scala for a test case + */ + def hasStablePrefix(tp: NamedType) = + tp.prefix.isStable + info2 match case info2: TypeAlias => if recur(tp1, info2.alias) then return true - if tp2.asInstanceOf[TypeRef].canDropAlias then return false + if tp2.asInstanceOf[TypeRef].canDropAlias && hasStablePrefix(tp2) then + return false case _ => tp1 match case tp1: NamedType => tp1.info match { case info1: TypeAlias => if recur(info1.alias, tp2) then return true - if tp1.asInstanceOf[TypeRef].canDropAlias then return false + if tp1.asInstanceOf[TypeRef].canDropAlias && hasStablePrefix(tp2) then + return false case _ => } val sym2 = tp2.symbol @@ -304,13 +313,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // For convenience we want X$ <:< X.type // This is safe because X$ self-type is X.type sym1 = sym1.companionModule - if ((sym1 ne NoSymbol) && (sym1 eq sym2)) + if (sym1 ne NoSymbol) && (sym1 eq sym2) then ctx.erasedTypes || sym1.isStaticOwner || isSubPrefix(tp1.prefix, tp2.prefix) || thirdTryNamed(tp2) else ( (tp1.name eq tp2.name) + && !sym1.is(Private) && tp2.isPrefixDependentMemberRef && isSubPrefix(tp1.prefix, tp2.prefix) && tp1.signature == tp2.signature @@ -420,16 +430,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling true } def compareTypeParamRef = - assumedTrue(tp1) || - tp2.match { - case tp2: TypeParamRef => constraint.isLess(tp1, tp2) - case _ => false - } || - isSubTypeWhenFrozen(bounds(tp1).hi.boxed, tp2) || { - if (canConstrain(tp1) && !approx.high) - addConstraint(tp1, tp2, fromBelow = false) && flagNothingBound - else thirdTry - } + assumedTrue(tp1) + || tp2.dealias.match + case tp2a: TypeParamRef => constraint.isLess(tp1, tp2a) + case tp2a: AndType => recur(tp1, tp2a) + case _ => false + || isSubTypeWhenFrozen(bounds(tp1).hi.boxed, tp2) + || (if canConstrain(tp1) && !approx.high then + addConstraint(tp1, tp2, fromBelow = false) && flagNothingBound + else thirdTry) + 
compareTypeParamRef case tp1: ThisType => val cls1 = tp1.cls @@ -522,7 +532,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling res case CapturingType(parent1, refs1) => - if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + if tp2.isAny then true + else if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + || !ctx.mode.is(Mode.CheckBoundsOrSelfType) && tp1.isAlwaysPure then recur(parent1, tp2) else thirdTry case tp1: AnnotatedType if !tp1.isRefining => @@ -585,7 +597,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } def compareTypeParamRef(tp2: TypeParamRef): Boolean = - assumedTrue(tp2) || { + assumedTrue(tp2) + || { val alwaysTrue = // The following condition is carefully formulated to catch all cases // where the subtype relation is true without needing to add a constraint @@ -596,11 +609,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // widening in `fourthTry` before adding to the constraint. 
if (frozenConstraint) recur(tp1, bounds(tp2).lo.boxed) else isSubTypeWhenFrozen(tp1, tp2) - alwaysTrue || { - if (canConstrain(tp2) && !approx.low) - addConstraint(tp2, tp1.widenExpr, fromBelow = true) - else fourthTry - } + alwaysTrue + || tp1.dealias.match + case tp1a: OrType => recur(tp1a, tp2) + case _ => false + || (if canConstrain(tp2) && !approx.low then + addConstraint(tp2, tp1.widenExpr, fromBelow = true) + else fourthTry) } def thirdTry: Boolean = tp2 match { @@ -633,6 +648,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case (info1: MethodType, info2: MethodType) => matchingMethodParams(info1, info2, precise = false) && isSubInfo(info1.resultType, info2.resultType.subst(info2, info1)) + case (info1 @ CapturingType(parent1, refs1), info2: Type) => + subCaptures(refs1, info2.captureSet, frozenConstraint).isOK && sameBoxed(info1, info2, refs1) + && isSubInfo(parent1, info2) + case (info1: Type, CapturingType(parent2, refs2)) => + val refs1 = info1.captureSet + (refs1.isAlwaysEmpty || subCaptures(refs1, refs2, frozenConstraint).isOK) && sameBoxed(info1, info2, refs1) + && isSubInfo(info1, parent2) case _ => isSubType(info1, info2) @@ -819,7 +841,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if refs1.isAlwaysEmpty then recur(tp1, parent2) else subCaptures(refs1, refs2, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) - && recur(tp1.widen.stripCapturing, parent2) + && (recur(tp1.widen.stripCapturing, parent2) + || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) + // this alternative is needed in case the right hand side is a + // capturing type that contains the lhs as an alternative of a union type. 
+ ) catch case ex: AssertionError => println(i"assertion failed while compare captured $tp1 <:< $tp2") throw ex @@ -1066,12 +1092,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * * - k := args.length * - d := otherArgs.length - k + * - T_0, ..., T_k-1 fresh type parameters + * - bodyArgs := otherArgs.take(d), T_0, ..., T_k-1 * - * `adaptedTycon` will be: + * Then, * - * [T_0, ..., T_k-1] =>> otherTycon[otherArgs(0), ..., otherArgs(d-1), T_0, ..., T_k-1] + * adaptedTycon := [T_0, ..., T_k-1] =>> otherTycon[bodyArgs] * - * where `T_n` has the same bounds as `otherTycon.typeParams(d+n)` + * where the bounds of `T_i` are set based on the bounds of `otherTycon.typeParams(d+i)` + * after substituting type parameter references by the corresponding argument + * in `bodyArgs` (see `adaptedBounds` in the implementation). * * Historical note: this strategy is known in Scala as "partial unification" * (even though the type constructor variable isn't actually unified but only @@ -1096,11 +1126,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling variancesConform(remainingTparams, tparams) && { val adaptedTycon = if d > 0 then + val initialArgs = otherArgs.take(d) + /** The arguments passed to `otherTycon` in the body of `tl` */ + def bodyArgs(tl: HKTypeLambda) = initialArgs ++ tl.paramRefs + /** The bounds of the type parameters of `tl` */ + def adaptedBounds(tl: HKTypeLambda) = + val bodyArgsComputed = bodyArgs(tl) + remainingTparams.map(_.paramInfo) + .mapconserve(_.substTypeParams(otherTycon, bodyArgsComputed).bounds) + HKTypeLambda(remainingTparams.map(_.paramName))( - tl => remainingTparams.map(remainingTparam => - tl.integrate(remainingTparams, remainingTparam.paramInfo).bounds), - tl => otherTycon.appliedTo( - otherArgs.take(d) ++ tl.paramRefs)) + adaptedBounds, + tl => otherTycon.appliedTo(bodyArgs(tl))) else otherTycon (assumedTrue(tycon) || directionalIsSubType(tycon, adaptedTycon)) && @@ 
-1313,8 +1350,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } } || tryLiftedToThis2 - case _: TypeVar => - recur(tp1, tp2.superType) + case tv: TypeVar => + if tv.isInstantiated then + recur(tp1, tp2.superType) + else + compareAppliedType2(tp2, tv.origin, args2) case tycon2: AnnotatedType if !tycon2.isRefining => recur(tp1, tp2.superType) case tycon2: AppliedType => @@ -1417,11 +1457,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if tp2 eq NoType then false else if tp1 eq tp2 then true else - val saved = constraint - val savedGadt = ctx.gadt.fresh + val savedCstr = constraint + val savedGadt = ctx.gadt inline def restore() = - state.constraint = saved - ctx.gadt.restore(savedGadt) + state.constraint = savedCstr + ctx.gadtState.restore(savedGadt) val savedSuccessCount = successCount try recCount += 1 @@ -1827,20 +1867,17 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling */ private def necessaryEither(op1: => Boolean, op2: => Boolean): Boolean = val preConstraint = constraint - val preGadt = ctx.gadt.fresh + val preGadt = ctx.gadt def allSubsumes(leftGadt: GadtConstraint, rightGadt: GadtConstraint, left: Constraint, right: Constraint): Boolean = - subsumes(left, right, preConstraint) && preGadt.match - case preGadt: ProperGadtConstraint => - preGadt.subsumes(leftGadt, rightGadt, preGadt) - case _ => - true + subsumes(left, right, preConstraint) + && subsumes(leftGadt.constraint, rightGadt.constraint, preGadt.constraint) if op1 then val op1Constraint = constraint - val op1Gadt = ctx.gadt.fresh + val op1Gadt = ctx.gadt constraint = preConstraint - ctx.gadt.restore(preGadt) + ctx.gadtState.restore(preGadt) if op2 then if allSubsumes(op1Gadt, ctx.gadt, op1Constraint, constraint) then gadts.println(i"GADT CUT - prefer ${ctx.gadt} over $op1Gadt") @@ -1849,15 +1886,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling 
gadts.println(i"GADT CUT - prefer $op1Gadt over ${ctx.gadt}") constr.println(i"CUT - prefer $op1Constraint over $constraint") constraint = op1Constraint - ctx.gadt.restore(op1Gadt) + ctx.gadtState.restore(op1Gadt) else gadts.println(i"GADT CUT - no constraint is preferable, reverting to $preGadt") constr.println(i"CUT - no constraint is preferable, reverting to $preConstraint") constraint = preConstraint - ctx.gadt.restore(preGadt) + ctx.gadtState.restore(preGadt) else constraint = op1Constraint - ctx.gadt.restore(op1Gadt) + ctx.gadtState.restore(op1Gadt) true else op2 end necessaryEither @@ -1954,6 +1991,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val info1 = m.info.widenExpr isSubInfo(info1, tp2.refinedInfo.widenExpr, m.symbol.info.orElse(info1)) || matchAbstractTypeMember(m.info) + || (tp1.isStable && isSubType(TermRef(tp1, m.symbol), tp2.refinedInfo)) tp1.member(name) match // inlined hasAltWith for performance case mbr: SingleDenotation => qualifies(mbr) @@ -2028,10 +2066,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling gadts.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.toString} ${bound.isRef(tparam)}") if (bound.isRef(tparam)) false else - val savedGadt = ctx.gadt.fresh - val success = gadtAddBound(tparam, bound, isUpper) - if !success then ctx.gadt.restore(savedGadt) - success + ctx.gadtState.rollbackGadtUnless(gadtAddBound(tparam, bound, isUpper)) } } @@ -2095,7 +2130,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case nil => formals2.isEmpty } - loop(tp1.paramInfos, tp2.paramInfos) + // If methods have erased parameters, then the erased parameters must match + val erasedValid = (!tp1.hasErasedParams && !tp2.hasErasedParams) || (tp1.erasedParams == tp2.erasedParams) + + erasedValid && loop(tp1.paramInfos, tp2.paramInfos) } /** Do the parameter types of `tp1` and `tp2` 
match in a way that allows `tp1` @@ -2698,7 +2736,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling x && { t.dealias match { case tp: TypeRef if !tp.symbol.isClass => false - case _: SkolemType | _: TypeVar | _: TypeParamRef => false + case _: SkolemType | _: TypeVar | _: TypeParamRef | _: TypeBounds => false case _ => foldOver(x, t) } } @@ -3142,7 +3180,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { tp case Nil => val casesText = MatchTypeTrace.noMatchesText(scrut, cases) - throw new TypeError(s"Match type reduction $casesText") + throw TypeError(em"Match type reduction $casesText") inFrozenConstraint { // Empty types break the basic assumption that if a scrutinee and a @@ -3226,7 +3264,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } override def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = - traceIndented(s"add GADT constraint ${show(sym)} ${if isUpper then "<:" else ">:"} ${show(b)} $frozenNotice, GADT constraint = ${show(ctx.gadt.debugBoundsDescription)}") { + traceIndented(s"add GADT constraint ${show(sym)} ${if isUpper then "<:" else ">:"} ${show(b)} $frozenNotice, GADT constraint = ${show(ctx.gadt)}") { super.gadtAddBound(sym, b, isUpper) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 1fc7ee5d22a8..9bcb3eca36bb 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -520,8 +520,9 @@ object TypeErasure { case _: ClassInfo => true case _ => false } - case tp: TypeParamRef => false - case tp: TypeBounds => false + case _: TypeParamRef => false + case _: TypeBounds => false + case _: MatchType => false case tp: TypeProxy => hasStableErasure(tp.translucentSuperType) case tp: AndType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2) case tp: OrType => hasStableErasure(tp.tp1) && 
hasStableErasure(tp.tp2) @@ -535,7 +536,14 @@ object TypeErasure { val paramss = res.paramNamess assert(paramss.length == 1) erasure(defn.FunctionType(paramss.head.length, - isContextual = res.isImplicitMethod, isErased = res.isErasedMethod)) + isContextual = res.isImplicitMethod)) + + def eraseErasedFunctionApply(erasedFn: MethodType)(using Context): Type = + val fnType = defn.FunctionType( + n = erasedFn.erasedParams.count(_ == false), + isContextual = erasedFn.isContextualMethod, + ) + erasure(fnType) } import TypeErasure._ @@ -591,9 +599,9 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst tp case tp: TypeRef => val sym = tp.symbol - if (!sym.isClass) this(tp.translucentSuperType) - else if (semiEraseVCs && isDerivedValueClass(sym)) eraseDerivedValueClass(tp) - else if (defn.isSyntheticFunctionClass(sym)) defn.functionTypeErasure(sym) + if !sym.isClass then this(checkedSuperType(tp)) + else if semiEraseVCs && isDerivedValueClass(sym) then eraseDerivedValueClass(tp) + else if defn.isSyntheticFunctionClass(sym) then defn.functionTypeErasure(sym) else eraseNormalClassRef(tp) case tp: AppliedType => val tycon = tp.tycon @@ -601,7 +609,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst else if (tycon.isRef(defn.PairClass)) erasePair(tp) else if (tp.isRepeatedParam) apply(tp.translateFromRepeated(toArray = sourceLanguage.isJava)) else if (semiEraseVCs && isDerivedValueClass(tycon.classSymbol)) eraseDerivedValueClass(tp) - else apply(tp.translucentSuperType) + else this(checkedSuperType(tp)) case tp: TermRef => this(underlyingOfTermRef(tp)) case _: ThisType => @@ -612,6 +620,8 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst defn.FunctionType(0) case RefinedType(parent, nme.apply, refinedInfo) if parent.typeSymbol eq defn.PolyFunctionClass => erasePolyFunctionApply(refinedInfo) + case RefinedType(parent, nme.apply, refinedInfo: MethodType) if 
defn.isErasedFunctionType(parent) => + eraseErasedFunctionApply(refinedInfo) case tp: TypeProxy => this(tp.underlying) case tp @ AndType(tp1, tp2) => @@ -638,7 +648,13 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst case tp: MethodType => def paramErasure(tpToErase: Type) = erasureFn(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, wildcardOK)(tpToErase) - val (names, formals0) = if (tp.isErasedMethod) (Nil, Nil) else (tp.paramNames, tp.paramInfos) + val (names, formals0) = if tp.hasErasedParams then + tp.paramNames + .zip(tp.paramInfos) + .zip(tp.erasedParams) + .collect{ case (param, isErased) if !isErased => param } + .unzip + else (tp.paramNames, tp.paramInfos) val formals = formals0.mapConserve(paramErasure) eraseResult(tp.resultType) match { case rt: MethodType => @@ -689,6 +705,18 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst tp } + /** Like translucentSuperType, but issue a fatal error if it does not exist. */ + private def checkedSuperType(tp: TypeProxy)(using Context): Type = + val tp1 = tp.translucentSuperType + if !tp1.exists then + val msg = tp.typeConstructor match + case tycon: TypeRef => + MissingType(tycon.prefix, tycon.name).toMessage.message + case _ => + i"Cannot resolve reference to $tp" + throw FatalError(msg) + tp1 + /** Widen term ref, skipping any `()` parameter of an eventual getter. Used to erase a TermRef. * Since getters are introduced after erasure, one would think that erasing a TermRef * could just use `widen`. 
However, it's possible that the TermRef got read from a class @@ -815,7 +843,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst throw new MissingType(tp.prefix, tp.name) val sym = tp.symbol if (!sym.isClass) { - val info = tp.translucentSuperType + val info = checkedSuperType(tp) if (!info.exists) assert(false, i"undefined: $tp with symbol $sym") return sigName(info) } @@ -841,7 +869,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst sigName( // todo: what about repeatedParam? if (erasureDependsOnArgs(sym)) this(tp) else if (sym.isClass) tp.underlying - else tp.translucentSuperType) + else checkedSuperType(tp)) case ErasedValueType(_, underlying) => sigName(underlying) case JavaArrayType(elem) => @@ -858,6 +886,8 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst // because RefinedTypes <: TypeProxy and it would be caught by // the case immediately below sigName(this(tp)) + case tp @ RefinedType(parent, nme.apply, refinedInfo) if defn.isErasedFunctionType(parent) => + sigName(this(tp)) case tp: TypeProxy => sigName(tp.underlying) case tp: WildcardType => diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 5816e1254873..24a207da6836 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -12,37 +12,58 @@ import Denotations._ import Decorators._ import reporting._ import ast.untpd -import config.Printers.cyclicErrors - -class TypeError(msg: String) extends Exception(msg) { - def this() = this("") - final def toMessage(using Context): Message = - withMode(Mode.Printing)(produceMessage) - def produceMessage(using Context): Message = super.getMessage.nn - override def getMessage: String = super.getMessage.nn -} - -class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name]) extends TypeError { - override def 
produceMessage(using Context): Message = - i"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" -} - -class MissingType(pre: Type, name: Name) extends TypeError { +import config.Printers.{cyclicErrors, noPrinter} + +import scala.annotation.constructorOnly + +abstract class TypeError(using creationContext: Context) extends Exception(""): + + /** Will the stack trace of this exception be filled in? + * This is expensive and only useful for debugging purposes. + */ + def computeStackTrace: Boolean = + ctx.debug || (cyclicErrors != noPrinter && this.isInstanceOf[CyclicReference] && !(ctx.mode is Mode.CheckCyclic)) + + override def fillInStackTrace(): Throwable = + if computeStackTrace then super.fillInStackTrace().nn + else this + + /** Convert to message. This takes an additional Context, so that we + * use the context when the message is first produced, i.e. when the TypeError + * is handled. This makes a difference for CyclicErrors since we need to know + * the context where the completed symbol is referenced, but the creation + * context of the CyclicReference is the completion context for the symbol. + * See i2887b for a test case, where we want to see + * "recursive or overloaded method needs result type". 
+ */ + def toMessage(using Context): Message + + /** Uses creationContext to produce the message */ + override def getMessage: String = toMessage.message + +object TypeError: + def apply(msg: Message)(using Context) = new TypeError: + def toMessage(using Context) = msg +end TypeError + +class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])(using Context) extends TypeError: + def toMessage(using Context) = em"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" + +class MissingType(pre: Type, name: Name)(using Context) extends TypeError: private def otherReason(pre: Type)(using Context): String = pre match { case pre: ThisType if pre.cls.givenSelfType.exists => i"\nor the self type of $pre might not contain all transitive dependencies" case _ => "" } - override def produceMessage(using Context): Message = { - if (ctx.debug) printStackTrace() - i"""cannot resolve reference to type $pre.$name - |the classfile defining the type might be missing from the classpath${otherReason(pre)}""" - } -} + override def toMessage(using Context): Message = + if ctx.debug then printStackTrace() + em"""cannot resolve reference to type $pre.$name + |the classfile defining the type might be missing from the classpath${otherReason(pre)}""" +end MissingType -class RecursionOverflow(val op: String, details: => String, val previous: Throwable, val weight: Int) -extends TypeError { +class RecursionOverflow(val op: String, details: => String, val previous: Throwable, val weight: Int)(using Context) +extends TypeError: def explanation: String = s"$op $details" @@ -69,50 +90,51 @@ extends TypeError { (rs.map(_.explanation): List[String]).mkString("\n ", "\n| ", "") } - override def produceMessage(using Context): Message = NoExplanation { + override def toMessage(using Context): Message = val mostCommon = 
recursions.groupBy(_.op).toList.maxBy(_._2.map(_.weight).sum)._2.reverse - s"""Recursion limit exceeded. - |Maybe there is an illegal cyclic reference? - |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. - |For the unprocessed stack trace, compile with -Yno-decode-stacktraces. - |A recurring operation is (inner to outer): - |${opsString(mostCommon)}""".stripMargin - } + em"""Recursion limit exceeded. + |Maybe there is an illegal cyclic reference? + |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. + |For the unprocessed stack trace, compile with -Yno-decode-stacktraces. + |A recurring operation is (inner to outer): + |${opsString(mostCommon).stripMargin}""" override def fillInStackTrace(): Throwable = this override def getStackTrace(): Array[StackTraceElement] = previous.getStackTrace().asInstanceOf -} +end RecursionOverflow /** Post-process exceptions that might result from StackOverflow to add * tracing information while unwalking the stack. */ // Beware: Since this object is only used when handling a StackOverflow, this code // cannot consume significant amounts of stack. 
-object handleRecursive { +object handleRecursive: + inline def underlyingStackOverflowOrNull(exc: Throwable): Throwable | Null = + var e: Throwable | Null = exc + while e != null && !e.isInstanceOf[StackOverflowError] do e = e.getCause + e + def apply(op: String, details: => String, exc: Throwable, weight: Int = 1)(using Context): Nothing = - if (ctx.settings.YnoDecodeStacktraces.value) + if ctx.settings.YnoDecodeStacktraces.value then throw exc - else - exc match { - case _: RecursionOverflow => - throw new RecursionOverflow(op, details, exc, weight) - case _ => - var e: Throwable | Null = exc - while (e != null && !e.isInstanceOf[StackOverflowError]) e = e.getCause - if (e != null) throw new RecursionOverflow(op, details, e, weight) - else throw exc - } -} + else exc match + case _: RecursionOverflow => + throw new RecursionOverflow(op, details, exc, weight) + case _ => + val so = underlyingStackOverflowOrNull(exc) + if so != null then throw new RecursionOverflow(op, details, so, weight) + else throw exc +end handleRecursive /** * This TypeError signals that completing denot encountered a cycle: it asked for denot.info (or similar), * so it requires knowing denot already. 
* @param denot */ -class CyclicReference private (val denot: SymDenotation) extends TypeError { +class CyclicReference private (val denot: SymDenotation)(using Context) extends TypeError: var inImplicitSearch: Boolean = false - override def produceMessage(using Context): Message = { + override def toMessage(using Context): Message = val cycleSym = denot.symbol // cycleSym.flags would try completing denot and would fail, but here we can use flagsUNSAFE to detect flags @@ -149,19 +171,16 @@ class CyclicReference private (val denot: SymDenotation) extends TypeError { CyclicReferenceInvolving(denot) errorMsg(ctx) - } -} + end toMessage -object CyclicReference { - def apply(denot: SymDenotation)(using Context): CyclicReference = { +object CyclicReference: + def apply(denot: SymDenotation)(using Context): CyclicReference = val ex = new CyclicReference(denot) - if (!(ctx.mode is Mode.CheckCyclic) || ctx.settings.Ydebug.value) { + if ex.computeStackTrace then cyclicErrors.println(s"Cyclic reference involving! 
$denot") val sts = ex.getStackTrace.asInstanceOf[Array[StackTraceElement]] for (elem <- sts take 200) cyclicErrors.println(elem.toString) - } ex - } -} +end CyclicReference diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index 7ec0f12db3b6..b5684b07f181 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -91,7 +91,7 @@ object TypeEval: val result = try op catch case e: Throwable => - throw new TypeError(e.getMessage.nn) + throw TypeError(em"${e.getMessage.nn}") ConstantType(Constant(result)) def constantFold1[T](extractor: Type => Option[T], op: T => Any): Option[Type] = diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 0c58cab0347f..6809e4b9083c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Contexts._, Types._, Symbols._, Names._, Flags._ +import Contexts._, Types._, Symbols._, Names._, NameKinds.*, Flags._ import SymDenotations._ import util.Spans._ import util.Stats @@ -13,6 +13,7 @@ import ast.tpd._ import reporting.trace import config.Printers.typr import config.Feature +import transform.SymUtils.* import typer.ProtoTypes._ import typer.ForceDegree import typer.Inferencing._ @@ -186,7 +187,7 @@ object TypeOps: if (normed.exists) normed else mapOver case tp: MethodicType => // See documentation of `Types#simplified` - val addTypeVars = new TypeMap: + val addTypeVars = new TypeMap with IdempotentCaptRefMap: val constraint = ctx.typerState.constraint def apply(t: Type): Type = t match case t: TypeParamRef => constraint.typeVarOfParam(t).orElse(t) @@ -209,7 +210,7 @@ object TypeOps: /** Approximate union type by intersection of its dominators. * That is, replace a union type Tn | ... 
| Tn - * by the smallest intersection type of base-class instances of T1,...,Tn. + * by the smallest intersection type of accessible base-class instances of T1,...,Tn. * Example: Given * * trait C[+T] @@ -225,16 +226,18 @@ object TypeOps: */ def orDominator(tp: Type)(using Context): Type = { - /** a faster version of cs1 intersect cs2 that treats bottom types correctly */ + /** a faster version of cs1 intersect cs2 */ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = - if cs1.head == defn.NothingClass then cs2 - else if cs2.head == defn.NothingClass then cs1 - else if cs1.head == defn.NullClass && !ctx.explicitNulls && cs2.head.derivesFrom(defn.ObjectClass) then cs2 - else if cs2.head == defn.NullClass && !ctx.explicitNulls && cs1.head.derivesFrom(defn.ObjectClass) then cs1 - else - val cs2AsSet = new util.HashSet[ClassSymbol](128) - cs2.foreach(cs2AsSet += _) - cs1.filter(cs2AsSet.contains) + val cs2AsSet = BaseClassSet(cs2) + cs1.filter(cs2AsSet.contains) + + /** a version of Type#baseClasses that treats bottom types correctly */ + def orBaseClasses(tp: Type): List[ClassSymbol] = tp.stripTypeVar match + case OrType(tp1, tp2) => + if tp1.isBottomType && (tp1 frozen_<:< tp2) then orBaseClasses(tp2) + else if tp2.isBottomType && (tp2 frozen_<:< tp1) then orBaseClasses(tp1) + else intersect(orBaseClasses(tp1), orBaseClasses(tp2)) + case _ => tp.baseClasses /** The minimal set of classes in `cs` which derive all other classes in `cs` */ def dominators(cs: List[ClassSymbol], accu: List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match { @@ -368,8 +371,14 @@ object TypeOps: } } + def isAccessible(cls: ClassSymbol) = + if cls.isOneOf(AccessFlags) || cls.privateWithin.exists then + cls.isAccessibleFrom(tp.baseType(cls).normalizedPrefix) + else true + // Step 3: Intersect base classes of both sides - val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect) + val commonBaseClasses = 
orBaseClasses(tp).filterConserve(isAccessible) + val doms = dominators(commonBaseClasses, Nil) def baseTp(cls: ClassSymbol): Type = tp.baseType(cls).mapReduceOr(identity)(mergeRefinedOrApplied) @@ -496,7 +505,7 @@ object TypeOps: override def derivedSelect(tp: NamedType, pre: Type) = if (pre eq tp.prefix) tp - else tryWiden(tp, tp.prefix).orElse { + else (if pre.isSingleton then NoType else tryWiden(tp, tp.prefix)).orElse { if (tp.isTerm && variance > 0 && !pre.isSingleton) apply(tp.info.widenExpr) else if (upper(pre).member(tp.name).exists) @@ -531,6 +540,18 @@ object TypeOps: val sym = tp.symbol forbidden.contains(sym) + /** We need to split the set into upper and lower approximations + * only if it contains a local element. The idea here is that at the + * time we perform an `avoid` all local elements are already accounted for + * and no further elements will be added afterwards. So we can just keep + * the set as it is. See comment by @linyxus on #16261. + */ + override def needsRangeIfInvariant(refs: CaptureSet): Boolean = + refs.elems.exists { + case ref: TermRef => toAvoid(ref) + case _ => false + } + override def apply(tp: Type): Type = tp match case tp: TypeVar if mapCtx.typerState.constraint.contains(tp) => val lo = TypeComparer.instanceType( @@ -601,7 +622,7 @@ object TypeOps: boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type)( - using Context): List[BoundsViolation] = withMode(Mode.CheckBounds) { + using Context): List[BoundsViolation] = withMode(Mode.CheckBoundsOrSelfType) { val argTypes = args.tpes /** Replace all wildcards in `tps` with `#` where `` is the @@ -666,8 +687,8 @@ object TypeOps: val bound1 = massage(bound) if (bound1 ne bound) { if (checkCtx eq ctx) checkCtx = ctx.fresh.setFreshGADTBounds - if (!checkCtx.gadt.contains(sym)) checkCtx.gadt.addToConstraint(sym) - checkCtx.gadt.addBound(sym, bound1, fromBelow) + if (!checkCtx.gadt.contains(sym)) checkCtx.gadtState.addToConstraint(sym) + 
checkCtx.gadtState.addBound(sym, bound1, fromBelow) typr.println("install GADT bound $bound1 for when checking F-bounded $sym") } } @@ -718,7 +739,7 @@ object TypeOps: * If the subtyping is true, the instantiated type `p.child[Vs]` is * returned. Otherwise, `NoType` is returned. */ - def refineUsingParent(parent: Type, child: Symbol)(using Context): Type = { + def refineUsingParent(parent: Type, child: Symbol, mixins: List[Type] = Nil)(using Context): Type = { // is a place holder from Scalac, it is hopeless to instantiate it. // // Quote from scalac (from nsc/symtab/classfile/Pickler.scala): @@ -733,7 +754,7 @@ object TypeOps: val childTp = if (child.isTerm) child.termRef else child.typeRef inContext(ctx.fresh.setExploreTyperState().setFreshGADTBounds.addMode(Mode.GadtConstraintInference)) { - instantiateToSubType(childTp, parent).dealias + instantiateToSubType(childTp, parent, mixins).dealias } } @@ -744,7 +765,7 @@ object TypeOps: * * Otherwise, return NoType. */ - private def instantiateToSubType(tp1: NamedType, tp2: Type)(using Context): Type = { + private def instantiateToSubType(tp1: NamedType, tp2: Type, mixins: List[Type])(using Context): Type = trace(i"instantiateToSubType($tp1, $tp2, $mixins)", typr) { // In order for a child type S to qualify as a valid subtype of the parent // T, we need to test whether it is possible S <: T. 
// @@ -770,20 +791,15 @@ object TypeOps: tref case tp: TypeRef if !tp.symbol.isClass => - def lo = LazyRef.of(apply(tp.underlying.loBound)) - def hi = LazyRef.of(apply(tp.underlying.hiBound)) val lookup = boundTypeParams.lookup(tp) if lookup != null then lookup else - val tv = newTypeVar(TypeBounds(lo, hi)) + val TypeBounds(lo, hi) = tp.underlying.bounds + val tv = newTypeVar(TypeBounds(defn.NothingType, hi.topType)) boundTypeParams(tp) = tv - // Force lazy ref eagerly using current context - // Otherwise, the lazy ref will be forced with a unknown context, - // which causes a problem in tests/patmat/i3645e.scala - lo.ref - hi.ref + assert(tv <:< apply(hi)) + apply(lo) <:< tv // no assert, since bounds might conflict tv - end if case tp @ AppliedType(tycon: TypeRef, _) if !tycon.dealias.typeSymbol.isClass && !tp.isMatchAlias => @@ -823,22 +839,57 @@ object TypeOps: } } - // Prefix inference, replace `p.C.this.Child` with `X.Child` where `X <: p.C` - // Note: we need to strip ThisType in `p` recursively. + /** Gather GADT symbols and `ThisType`s found in `tp2`, ie. the scrutinee. 
*/ + object TraverseTp2 extends TypeTraverser: + val thisTypes = util.HashSet[ThisType]() + val gadtSyms = new mutable.ListBuffer[Symbol] + + def traverse(tp: Type) = { + val tpd = tp.dealias + if tpd ne tp then traverse(tpd) + else tp match + case tp: ThisType if !tp.tref.symbol.isStaticOwner && !thisTypes.contains(tp) => + thisTypes += tp + traverseChildren(tp.tref) + case tp: TypeRef if tp.symbol.isAbstractOrParamType => + gadtSyms += tp.symbol + traverseChildren(tp) + val owners = Iterator.iterate(tp.symbol)(_.maybeOwner).takeWhile(_.exists) + for sym <- owners do + // add ThisType's for the classes symbols in the ownership of `tp` + // for example, i16451.CanForward.scala, add `Namer.this`, as one of the owners of the type parameter `A1` + if sym.isClass && !sym.isAnonymousClass && !sym.isStaticOwner then + traverse(sym.thisType) + case _ => + traverseChildren(tp) + } + TraverseTp2.traverse(tp2) + val thisTypes = TraverseTp2.thisTypes + val gadtSyms = TraverseTp2.gadtSyms.toList + + // Prefix inference, given `p.C.this.Child`: + // 1. return it as is, if `C.this` is found in `tp`, i.e. the scrutinee; or + // 2. replace it with `X.Child` where `X <: p.C`, stripping ThisType in `p` recursively. // - // See tests/patmat/i3938.scala + // See tests/patmat/i3938.scala, tests/pos/i15029.more.scala, tests/pos/i16785.scala class InferPrefixMap extends TypeMap { var prefixTVar: Type | Null = null def apply(tp: Type): Type = tp match { - case ThisType(tref: TypeRef) if !tref.symbol.isStaticOwner => - if (tref.symbol.is(Module)) - TermRef(this(tref.prefix), tref.symbol.sourceModule) + case tp @ ThisType(tref) if !tref.symbol.isStaticOwner => + val symbol = tref.symbol + if thisTypes.contains(tp) then + prefixTVar = tp // e.g. 
tests/pos/i16785.scala, keep Outer.this + prefixTVar.uncheckedNN + else if symbol.is(Module) then + TermRef(this(tref.prefix), symbol.sourceModule) else if (prefixTVar != null) this(tref) else { prefixTVar = WildcardType // prevent recursive call from assigning it - val tref2 = this(tref.applyIfParameterized(tref.typeParams.map(_ => TypeBounds.empty))) - prefixTVar = newTypeVar(TypeBounds.upper(tref2)) + // e.g. tests/pos/i15029.more.scala, create a TypeVar for `Instances`' B, so we can disregard `Ints` + val tvars = tref.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } + val tref2 = this(tref.applyIfParameterized(tvars)) + prefixTVar = newTypeVar(TypeBounds.upper(tref2), DepParamName.fresh(tref.name)) prefixTVar.uncheckedNN } case tp => mapOver(tp) @@ -846,15 +897,11 @@ object TypeOps: } val inferThisMap = new InferPrefixMap - val tvars = tp1.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds) } + val tvars = tp1.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } val protoTp1 = inferThisMap.apply(tp1).appliedTo(tvars) - val getAbstractSymbols = new TypeAccumulator[List[Symbol]]: - def apply(xs: List[Symbol], tp: Type) = tp.dealias match - case tp: TypeRef if tp.symbol.exists && !tp.symbol.isClass => foldOver(tp.symbol :: xs, tp) - case tp => foldOver(xs, tp) - val syms2 = getAbstractSymbols(Nil, tp2).reverse - if syms2.nonEmpty then ctx.gadt.addToConstraint(syms2) + if gadtSyms.nonEmpty then + ctx.gadtState.addToConstraint(gadtSyms) // If parent contains a reference to an abstract type, then we should // refine subtype checking to eliminate abstract types according to @@ -866,6 +913,7 @@ object TypeOps: } def instantiate(): Type = { + for tp <- mixins.reverseIterator do protoTp1 <:< tp maximizeType(protoTp1, NoSpan) wildApprox(protoTp1) } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala 
index 483745928f25..fe0fc8a6dc2d 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -43,6 +43,7 @@ import scala.annotation.internal.sharable import scala.annotation.threadUnsafe import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.TypeUtils.isErasedClass object Types { @@ -118,10 +119,9 @@ object Types { if t.mightBeProvisional then t.mightBeProvisional = t match case t: TypeRef => - !t.currentSymbol.isStatic && { + t.currentSymbol.isProvisional || !t.currentSymbol.isStatic && { (t: Type).mightBeProvisional = false // break cycles - t.symbol.isProvisional - || test(t.prefix, theAcc) + test(t.prefix, theAcc) || t.denot.infoOrCompleter.match case info: LazyType => true case info: AliasingBounds => test(info.alias, theAcc) @@ -397,6 +397,10 @@ object Types { def isRepeatedParam(using Context): Boolean = typeSymbol eq defn.RepeatedParamClass + /** Is this a parameter type that allows implicit argument converson? */ + def isConvertibleParam(using Context): Boolean = + typeSymbol eq defn.IntoType + /** Is this the type of a method that has a repeated parameter type as * last parameter type? */ @@ -422,7 +426,7 @@ object Types { def isContextualMethod: Boolean = false /** Is this a MethodType for which the parameters will not be used? */ - def isErasedMethod: Boolean = false + def hasErasedParams(using Context): Boolean = false /** Is this a match type or a higher-kinded abstraction of one? */ @@ -442,14 +446,6 @@ object Types { final def containsWildcardTypes(using Context) = existsPart(_.isInstanceOf[WildcardType], StopAt.Static, forceLazy = false) - /** Does this type contain LazyRef types? 
*/ - final def containsLazyRefs(using Context) = - val acc = new TypeAccumulator[Boolean]: - def apply(x: Boolean, tp: Type): Boolean = tp match - case _: LazyRef => true - case _ => x || foldOver(x, tp) - acc(false, this) - // ----- Higher-order combinators ----------------------------------- /** Returns true if there is a part of this type that satisfies predicate `p`. @@ -544,7 +540,7 @@ object Types { case tp: ClassInfo => tp.cls :: Nil case AndType(l, r) => - l.parentSymbols(include) | r.parentSymbols(include) + l.parentSymbols(include).setUnion(r.parentSymbols(include)) case OrType(l, r) => l.parentSymbols(include) intersect r.parentSymbols(include) // TODO does not conform to spec case _ => @@ -753,16 +749,6 @@ object Types { // which means that we always defensively copy the type in the future. This second // measure is necessary because findMember calls might be cached, so do not // necessarily appear in nested order. - // Without the defensive copy, Typer.scala fails to compile at the line - // - // untpd.rename(lhsCore, setterName).withType(setterType), WildcardType) - // - // because the subtype check - // - // ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed] - // - // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.) - // // Without the `openedTwice` trick, Typer.scala fails to Ycheck // at phase resolveSuper. val rt = @@ -783,11 +769,11 @@ object Types { val rinfo = tp.refinedInfo if (name.isTypeName && !pinfo.isInstanceOf[ClassInfo]) { // simplified case that runs more efficiently val jointInfo = - if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBounds) then + if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBoundsOrSelfType) then // In normal situations, the only way to "improve" on rinfo is to return an empty type bounds // So, we do not lose anything essential in "widening" to rinfo. 
// We need to compute the precise info only when checking for empty bounds - // which is communicated by the CheckBounds mode. + // which is communicated by the CheckBoundsOrSelfType mode. rinfo else if ctx.base.pendingMemberSearches.contains(name) then pinfo safe_& rinfo @@ -1085,12 +1071,15 @@ object Types { * @param relaxedCheck if true type `Null` becomes a subtype of non-primitive value types in TypeComparer. * @param matchLoosely if true the types `=> T` and `()T` are seen as overriding each other. * @param checkClassInfo if true we check that ClassInfos are within bounds of abstract types + * + * @param isSubType a function used for checking subtype relationships. */ - final def overrides(that: Type, relaxedCheck: Boolean, matchLoosely: => Boolean, checkClassInfo: Boolean = true)(using Context): Boolean = { + final def overrides(that: Type, relaxedCheck: Boolean, matchLoosely: => Boolean, checkClassInfo: Boolean = true, + isSubType: (Type, Type) => Context ?=> Boolean = (tp1, tp2) => tp1 frozen_<:< tp2)(using Context): Boolean = { val overrideCtx = if relaxedCheck then ctx.relaxedOverrideContext else ctx inContext(overrideCtx) { !checkClassInfo && this.isInstanceOf[ClassInfo] - || (this.widenExpr frozen_<:< that.widenExpr) + || isSubType(this.widenExpr, that.widenExpr) || matchLoosely && { val this1 = this.widenNullaryMethod val that1 = that.widenNullaryMethod @@ -1192,7 +1181,8 @@ object Types { /** Remove all AnnotatedTypes wrapping this type. */ - def stripAnnots(using Context): Type = this + def stripAnnots(keep: Annotation => Context ?=> Boolean)(using Context): Type = this + final def stripAnnots(using Context): Type = stripAnnots(_ => false) /** Strip TypeVars and Annotation and CapturingType wrappers */ def stripped(using Context): Type = this @@ -1295,11 +1285,14 @@ object Types { * then the top-level union isn't widened. This is needed so that type inference can infer nullable types. 
*/ def widenUnion(using Context): Type = widen match - case tp @ OrNull(tp1): OrType => - // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. - val tp1Widen = tp1.widenUnionWithoutNull - if (tp1Widen.isRef(defn.AnyClass)) tp1Widen - else tp.derivedOrType(tp1Widen, defn.NullType) + case tp: OrType => tp match + case OrNull(tp1) => + // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. + val tp1Widen = tp1.widenUnionWithoutNull + if (tp1Widen.isRef(defn.AnyClass)) tp1Widen + else tp.derivedOrType(tp1Widen, defn.NullType) + case _ => + tp.widenUnionWithoutNull case tp => tp.widenUnionWithoutNull @@ -1479,7 +1472,7 @@ object Types { /** Dealias, and if result is a dependent function type, drop the `apply` refinement. */ final def dropDependentRefinement(using Context): Type = dealias match { - case RefinedType(parent, nme.apply, _) => parent + case RefinedType(parent, nme.apply, mt) if defn.isNonRefinedFunction(parent) => parent case tp => tp } @@ -1582,8 +1575,6 @@ object Types { else NoType case SkolemType(tp) => loop(tp) - case pre: WildcardType => - WildcardType case pre: TypeRef => pre.info match { case TypeAlias(alias) => loop(alias) @@ -1721,6 +1712,8 @@ object Types { else NoType case t if defn.isNonRefinedFunction(t) => t + case t if defn.isErasedFunctionType(t) => + t case t @ SAMType(_) => t case _ => @@ -1848,15 +1841,15 @@ object Types { case mt: MethodType if !mt.isParamDependent => val formals1 = if (dropLast == 0) mt.paramInfos else mt.paramInfos dropRight dropLast val isContextual = mt.isContextualMethod && !ctx.erasedTypes - val isErased = mt.isErasedMethod && !ctx.erasedTypes val result1 = mt.nonDependentResultApprox match { case res: MethodType => res.toFunctionType(isJava) case res => res } val funType = defn.FunctionOf( formals1 mapConserve (_.translateFromRepeated(toArray = isJava)), - result1, isContextual, isErased) - if alwaysDependent || mt.isResultDependent then 
RefinedType(funType, nme.apply, mt) + result1, isContextual) + if alwaysDependent || mt.isResultDependent then + RefinedType(funType, nme.apply, mt) else funType } @@ -1879,8 +1872,16 @@ object Types { def dropRepeatedAnnot(using Context): Type = dropAnnot(defn.RepeatedAnnot) + /** A translation from types of original parameter ValDefs to the types + * of parameters in MethodTypes. + * Translates `Seq[T] @repeated` or `Array[T] @repeated` to `[T]`. + * That way, repeated arguments are made manifest without risk of dropped annotations. + */ def annotatedToRepeated(using Context): Type = this match { - case tp @ ExprType(tp1) => tp.derivedExprType(tp1.annotatedToRepeated) + case tp @ ExprType(tp1) => + tp.derivedExprType(tp1.annotatedToRepeated) + case self @ AnnotatedType(tp, annot) if annot matches defn.RetainsByNameAnnot => + self.derivedAnnotatedType(tp.annotatedToRepeated, annot) case AnnotatedType(tp, annot) if annot matches defn.RepeatedAnnot => val typeSym = tp.typeSymbol.asClass assert(typeSym == defn.SeqClass || typeSym == defn.ArrayClass) @@ -2182,7 +2183,7 @@ object Types { // --- NamedTypes ------------------------------------------------------------------ - abstract class NamedType extends CachedProxyType, ValueType { self => + abstract class NamedType extends CachedProxyType, ValueType, Product { self => type ThisType >: this.type <: NamedType type ThisName <: Name @@ -2190,8 +2191,10 @@ object Types { val prefix: Type def designator: Designator protected def designator_=(d: Designator): Unit + def _1: Type + def _2: Designator - assert(prefix.isValueType || (prefix eq NoPrefix), s"invalid prefix $prefix") + assert(NamedType.validPrefix(prefix), s"invalid prefix $prefix") private var myName: Name | Null = null private var lastDenotation: Denotation | Null = null @@ -2266,15 +2269,17 @@ object Types { final def symbol(using Context): Symbol = // We can rely on checkedPeriod (unlike in the definition of `denot` below) // because 
SymDenotation#installAfter never changes the symbol - if (checkedPeriod == ctx.period) lastSymbol.nn else computeSymbol + if (checkedPeriod.code == ctx.period.code) lastSymbol.asInstanceOf[Symbol] + else computeSymbol private def computeSymbol(using Context): Symbol = - designator match { + val result = designator match case sym: Symbol => if (sym.isValidInCurrentRun) sym else denot.symbol case name => - (if (denotationIsCurrent) lastDenotation.nn else denot).symbol - } + (if (denotationIsCurrent) lastDenotation.asInstanceOf[Denotation] else denot).symbol + if checkedPeriod.code != NowhereCode then checkedPeriod = ctx.period + result /** There is a denotation computed which is valid (somewhere in) the * current run. @@ -2306,18 +2311,16 @@ object Types { def info(using Context): Type = denot.info - /** The denotation currently denoted by this type */ - final def denot(using Context): Denotation = { + /** The denotation currently denoted by this type. Extremely hot. Carefully optimized + * to be as small as possible. 
+ */ + final def denot(using Context): Denotation = util.Stats.record("NamedType.denot") - val now = ctx.period + val lastd = lastDenotation.asInstanceOf[Denotation] // Even if checkedPeriod == now we still need to recheck lastDenotation.validFor // as it may have been mutated by SymDenotation#installAfter - if (checkedPeriod != Nowhere && lastDenotation.nn.validFor.contains(now)) { - checkedPeriod = now - lastDenotation.nn - } + if checkedPeriod.code != NowhereCode && lastd.validFor.contains(ctx.period) then lastd else computeDenot - } private def computeDenot(using Context): Denotation = { util.Stats.record("NamedType.computeDenot") @@ -2353,10 +2356,11 @@ object Types { lastDenotation match { case lastd0: SingleDenotation => val lastd = lastd0.skipRemoved - if (lastd.validFor.runId == ctx.runId && (checkedPeriod != Nowhere)) finish(lastd.current) + if lastd.validFor.runId == ctx.runId && checkedPeriod.code != NowhereCode then + finish(lastd.current) else lastd match { case lastd: SymDenotation => - if (stillValid(lastd) && (checkedPeriod != Nowhere)) finish(lastd.current) + if stillValid(lastd) && checkedPeriod.code != NowhereCode then finish(lastd.current) else finish(memberDenot(lastd.initial.name, allowPrivate = false)) case _ => fromDesignator @@ -2425,12 +2429,12 @@ object Types { } else { if (!ctx.reporter.errorsReported) - throw new TypeError( - i"""bad parameter reference $this at ${ctx.phase} - |the parameter is ${param.showLocated} but the prefix $prefix - |does not define any corresponding arguments. - |idx = $idx, args = $args%, %, - |constraint = ${ctx.typerState.constraint}""") + throw TypeError( + em"""bad parameter reference $this at ${ctx.phase} + |the parameter is ${param.showLocated} but the prefix $prefix + |does not define any corresponding arguments. 
+ |idx = $idx, args = $args%, %, + |constraint = ${ctx.typerState.constraint}""") NoDenotation } } @@ -2442,9 +2446,8 @@ object Types { setDenot(memberDenot(name, allowPrivate = !symbol.exists || symbol.is(Private))) private def setDenot(denot: Denotation)(using Context): Unit = { - if (Config.checkNoDoubleBindings) - if (ctx.settings.YnoDoubleBindings.value) - checkSymAssign(denot.symbol) + if ctx.base.checkNoDoubleBindings then + checkSymAssign(denot.symbol) lastDenotation = denot lastSymbol = denot.symbol @@ -2458,6 +2461,8 @@ object Types { } private def checkDenot()(using Context) = {} + //if name.toString == "getConstructor" then + // println(i"set denot of $this to ${denot.info}, ${denot.getClass}, ${Phases.phaseOf(denot.validFor.lastPhaseId)} at ${ctx.phase}") private def checkSymAssign(sym: Symbol)(using Context) = { def selfTypeOf(sym: Symbol) = @@ -2499,10 +2504,48 @@ object Types { /** A reference with the initial symbol in `symd` has an info that * might depend on the given prefix. + * Note: If M is an abstract type or non-final term member in trait or class C, + * its info depends even on C.this if class C has a self type that refines + * the info of M. 
*/ private def infoDependsOnPrefix(symd: SymDenotation, prefix: Type)(using Context): Boolean = + + def refines(tp: Type, name: Name): Boolean = tp match + case tp: TypeRef => + tp.symbol match + case cls: ClassSymbol => + val otherd = cls.nonPrivateMembersNamed(name) + otherd.exists && !otherd.containsSym(symd.symbol) + case tsym => + refines(tsym.info.hiBound, name) + // avoid going through tp.denot, since that might call infoDependsOnPrefix again + case RefinedType(parent, rname, _) => + rname == name || refines(parent, name) + case tp: TypeProxy => + refines(tp.underlying, name) + case AndType(tp1, tp2) => + refines(tp1, name) || refines(tp2, name) + case _ => + false + + def givenSelfTypeOrCompleter(cls: Symbol) = cls.infoOrCompleter match + case cinfo: ClassInfo => + cinfo.selfInfo match + case sym: Symbol => sym.infoOrCompleter + case tpe: Type => tpe + case _ => NoType + symd.maybeOwner.membersNeedAsSeenFrom(prefix) && !symd.is(NonMember) - || prefix.isInstanceOf[Types.ThisType] && symd.is(Opaque) // see pos/i11277.scala for a test where this matters + || prefix.match + case prefix: Types.ThisType => + (symd.isAbstractType + || symd.isTerm + && !symd.flagsUNSAFE.isOneOf(Module | Final | Param) + && !symd.maybeOwner.isEffectivelyFinal) + && prefix.sameThis(symd.maybeOwner.thisType) + && refines(givenSelfTypeOrCompleter(prefix.cls), symd.name) + case _ => false + end infoDependsOnPrefix /** Is this a reference to a class or object member with an info that might depend * on the prefix? @@ -2512,10 +2555,7 @@ object Types { case _ => true } - /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type - * to an (unbounded) wildcard type. - * - * (2) Reduce a type-ref `T { X = U; ... } # X` to `U` + /** Reduce a type-ref `T { X = U; ... } # X` to `U` * provided `U` does not refer with a RecThis to the * refinement type `T { X = U; ... 
}` */ @@ -2637,45 +2677,33 @@ object Types { case _ => } } - if (prefix.isInstanceOf[WildcardType]) WildcardType + if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) else withPrefix(prefix) } /** A reference like this one, but with the given symbol, if it exists */ - final def withSym(sym: Symbol)(using Context): ThisType = - if ((designator ne sym) && sym.exists) NamedType(prefix, sym).asInstanceOf[ThisType] + private def withSym(sym: Symbol)(using Context): ThisType = + if designator ne sym then NamedType(prefix, sym).asInstanceOf[ThisType] + else this + + private def withName(name: Name)(using Context): ThisType = + if designator ne name then NamedType(prefix, name).asInstanceOf[ThisType] else this /** A reference like this one, but with the given denotation, if it exists. - * Returns a new named type with the denotation's symbol if that symbol exists, and - * one of the following alternatives applies: - * 1. The current designator is a symbol and the symbols differ, or - * 2. The current designator is a name and the new symbolic named type - * does not have a currently known denotation. - * 3. The current designator is a name and the new symbolic named type - * has the same info as the current info - * Otherwise the current denotation is overwritten with the given one. - * - * Note: (2) and (3) are a "lock in mechanism" where a reference with a name as - * designator can turn into a symbolic reference. - * - * Note: This is a subtle dance to keep the balance between going to symbolic - * references as much as we can (since otherwise we'd risk getting cycles) - * and to still not lose any type info in the denotation (since symbolic - * references often recompute their info directly from the symbol's info). - * A test case is neg/opaque-self-encoding.scala. + * Returns a new named type with the denotation's symbol as designator + * if that symbol exists and it is different from the current designator. 
+ * Returns a new named type with the denotations's name as designator + * if the denotation is overloaded and its name is different from the + * current designator. */ final def withDenot(denot: Denotation)(using Context): ThisType = if denot.exists then - val adapted = withSym(denot.symbol) - val result = - if (adapted.eq(this) - || designator.isInstanceOf[Symbol] - || !adapted.denotationIsCurrent - || adapted.info.eq(denot.info)) - adapted + val adapted = + if denot.symbol.exists then withSym(denot.symbol) + else if denot.isOverloaded then withName(denot.name) else this - val lastDenot = result.lastDenotation + val lastDenot = adapted.lastDenotation denot match case denot: SymDenotation if denot.validFor.firstPhaseId < ctx.phase.id @@ -2685,20 +2713,20 @@ object Types { // In this case the new SymDenotation might be valid for all phases, which means // we would not recompute the denotation when travelling to an earlier phase, maybe // in the next run. We fix that problem by creating a UniqueRefDenotation instead. - core.println(i"overwrite ${result.toString} / ${result.lastDenotation}, ${result.lastDenotation.getClass} with $denot at ${ctx.phaseId}") - result.setDenot( + core.println(i"overwrite ${adapted.toString} / ${adapted.lastDenotation}, ${adapted.lastDenotation.getClass} with $denot at ${ctx.phaseId}") + adapted.setDenot( UniqueRefDenotation( denot.symbol, denot.info, Period(ctx.runId, ctx.phaseId, denot.validFor.lastPhaseId), this.prefix)) case _ => - result.setDenot(denot) - result.asInstanceOf[ThisType] + adapted.setDenot(denot) + adapted.asInstanceOf[ThisType] else // don't assign NoDenotation, we might need to recover later. Test case is pos/avoid.scala. this /** A reference like this one, but with the given prefix. 
*/ - final def withPrefix(prefix: Type)(using Context): NamedType = { + final def withPrefix(prefix: Type)(using Context): Type = { def reload(): NamedType = { val lastSym = lastSymbol.nn val allowPrivate = !lastSym.exists || lastSym.is(Private) @@ -2711,6 +2739,7 @@ object Types { NamedType(prefix, name, d) } if (prefix eq this.prefix) this + else if !NamedType.validPrefix(prefix) then UnspecifiedErrorType else if (lastDenotation == null) NamedType(prefix, designator) else designator match { case sym: Symbol => @@ -2795,7 +2824,7 @@ object Types { ((prefix eq NoPrefix) || symbol.is(ParamAccessor) && (prefix eq symbol.owner.thisType) || isRootCapability - ) && !symbol.is(Method) + ) && !symbol.isOneOf(UnstableValueFlags) override def isRootCapability(using Context): Boolean = name == nme.CAPTURE_ROOT && symbol == defn.captureRoot @@ -2902,6 +2931,9 @@ object Types { def apply(prefix: Type, designator: Name, denot: Denotation)(using Context): NamedType = if (designator.isTermName) TermRef.apply(prefix, designator.asTermName, denot) else TypeRef.apply(prefix, designator.asTypeName, denot) + def unapply(tp: NamedType): NamedType = tp + + def validPrefix(prefix: Type): Boolean = prefix.isValueType || (prefix eq NoPrefix) } object TermRef { @@ -3316,11 +3348,11 @@ object Types { final class CachedAndType(tp1: Type, tp2: Type) extends AndType(tp1, tp2) object AndType { - def apply(tp1: Type, tp2: Type)(using Context): AndType = { - assert(tp1.isValueTypeOrWildcard && - tp2.isValueTypeOrWildcard, i"$tp1 & $tp2 / " + s"$tp1 & $tp2") + def apply(tp1: Type, tp2: Type)(using Context): AndType = + def where = i"in intersection $tp1 & $tp2" + expectValueTypeOrWildcard(tp1, where) + expectValueTypeOrWildcard(tp2, where) unchecked(tp1, tp2) - } def balanced(tp1: Type, tp2: Type)(using Context): AndType = tp1 match @@ -3360,7 +3392,7 @@ object Types { TypeComparer.liftIfHK(tp1, tp2, AndType.make(_, _, checkValid = false), makeHk, _ | _) } - abstract case class OrType(tp1: Type, 
tp2: Type) extends AndOrType { + abstract case class OrType protected(tp1: Type, tp2: Type) extends AndOrType { def isAnd: Boolean = false def isSoft: Boolean private var myBaseClassesPeriod: Period = Nowhere @@ -3393,9 +3425,6 @@ object Types { myFactorCount else 1 - assert(tp1.isValueTypeOrWildcard && - tp2.isValueTypeOrWildcard, s"$tp1 $tp2") - private var myJoin: Type = _ private var myJoinPeriod: Period = Nowhere @@ -3428,25 +3457,29 @@ object Types { private var myAtoms: Atoms = _ private var myWidened: Type = _ + private def computeAtoms()(using Context): Atoms = + if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms + else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms + else tp1.atoms | tp2.atoms + + private def computeWidenSingletons()(using Context): Type = + val tp1w = tp1.widenSingletons + val tp2w = tp2.widenSingletons + if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) + private def ensureAtomsComputed()(using Context): Unit = if atomsRunId != ctx.runId then - myAtoms = - if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms - else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms - else tp1.atoms | tp2.atoms - val tp1w = tp1.widenSingletons - val tp2w = tp2.widenSingletons - myWidened = if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) - atomsRunId = ctx.runId + myAtoms = computeAtoms() + myWidened = computeWidenSingletons() + if !isProvisional then atomsRunId = ctx.runId override def atoms(using Context): Atoms = ensureAtomsComputed() myAtoms - override def widenSingletons(using Context): Type = { + override def widenSingletons(using Context): Type = ensureAtomsComputed() myWidened - } def derivedOrType(tp1: Type, tp2: Type, soft: Boolean = isSoft)(using Context): Type = if ((tp1 eq this.tp1) && (tp2 eq this.tp2) && soft == isSoft) this @@ -3466,6 +3499,9 @@ object Types { object OrType { def apply(tp1: Type, tp2: Type, soft: Boolean)(using 
Context): OrType = { + def where = i"in union $tp1 | $tp2" + expectValueTypeOrWildcard(tp1, where) + expectValueTypeOrWildcard(tp2, where) assertUnerased() unique(new CachedOrType(tp1, tp2, soft)) } @@ -3496,6 +3532,11 @@ object Types { TypeComparer.liftIfHK(tp1, tp2, OrType(_, _, soft = true), makeHk, _ & _) } + def expectValueTypeOrWildcard(tp: Type, where: => String)(using Context): Unit = + if !tp.isValueTypeOrWildcard then + assert(!ctx.isAfterTyper, where) // we check correct kinds at PostTyper + throw TypeError(em"$tp is not a value type, cannot be used $where") + /** An extractor object to pattern match against a nullable union. * e.g. * @@ -3606,12 +3647,18 @@ object Types { def companion: LambdaTypeCompanion[ThisName, PInfo, This] + def erasedParams(using Context) = List.fill(paramInfos.size)(false) + /** The type `[tparams := paramRefs] tp`, where `tparams` can be * either a list of type parameter symbols or a list of lambda parameters + * + * @pre If `tparams` is a list of lambda parameters, then it must be the + * full, in-order list of type parameters of some type constructor, as + * can be obtained using `TypeApplications#typeParams`. */ def integrate(tparams: List[ParamInfo], tp: Type)(using Context): Type = (tparams: @unchecked) match { - case LambdaParam(lam, _) :: _ => tp.subst(lam, this) + case LambdaParam(lam, _) :: _ => tp.subst(lam, this) // This is where the precondition is necessary. 
case params: List[Symbol @unchecked] => tp.subst(params, paramRefs) } @@ -3679,7 +3726,11 @@ object Types { else Signature(tp, sourceLanguage) this match case tp: MethodType => - val params = if (isErasedMethod) Nil else tp.paramInfos + val params = if (hasErasedParams) + tp.paramInfos + .zip(tp.erasedParams) + .collect { case (param, isErased) if !isErased => param } + else tp.paramInfos resultSignature.prependTermParams(params, sourceLanguage) case tp: PolyType => resultSignature.prependTypeParams(tp.paramNames.length) @@ -3886,16 +3937,14 @@ object Types { def companion: MethodTypeCompanion final override def isImplicitMethod: Boolean = - companion.eq(ImplicitMethodType) || - companion.eq(ErasedImplicitMethodType) || - isContextualMethod - final override def isErasedMethod: Boolean = - companion.eq(ErasedMethodType) || - companion.eq(ErasedImplicitMethodType) || - companion.eq(ErasedContextualMethodType) + companion.eq(ImplicitMethodType) || isContextualMethod + final override def hasErasedParams(using Context): Boolean = + erasedParams.contains(true) final override def isContextualMethod: Boolean = - companion.eq(ContextualMethodType) || - companion.eq(ErasedContextualMethodType) + companion.eq(ContextualMethodType) + + override def erasedParams(using Context): List[Boolean] = + paramInfos.map(p => p.hasAnnotation(defn.ErasedParamAnnot)) protected def prefixString: String = companion.prefixString } @@ -3949,29 +3998,50 @@ object Types { * and inline parameters: * - replace @repeated annotations on Seq or Array types by types * - add @inlineParam to inline parameters + * - add @erasedParam to erased parameters + * - wrap types of parameters that have an @allowConversions annotation with Into[_] */ - def fromSymbols(params: List[Symbol], resultType: Type)(using Context): MethodType = { - def translateInline(tp: Type): Type = tp match { - case ExprType(resType) => ExprType(AnnotatedType(resType, Annotation(defn.InlineParamAnnot))) - case _ => AnnotatedType(tp, 
Annotation(defn.InlineParamAnnot)) - } - def translateErased(tp: Type): Type = tp match { - case ExprType(resType) => ExprType(AnnotatedType(resType, Annotation(defn.ErasedParamAnnot))) - case _ => AnnotatedType(tp, Annotation(defn.ErasedParamAnnot)) - } - def paramInfo(param: Symbol) = { + def fromSymbols(params: List[Symbol], resultType: Type)(using Context): MethodType = + def addAnnotation(tp: Type, cls: ClassSymbol, param: Symbol): Type = tp match + case ExprType(resType) => ExprType(addAnnotation(resType, cls, param)) + case _ => AnnotatedType(tp, Annotation(cls, param.span)) + + def wrapConvertible(tp: Type) = + AppliedType(defn.IntoType.typeRef, tp :: Nil) + + /** Add `Into[..] to the type itself and if it is a function type, to all its + * curried result type(s) as well. + */ + def addInto(tp: Type): Type = tp match + case tp @ AppliedType(tycon, args) if tycon.typeSymbol == defn.RepeatedParamClass => + tp.derivedAppliedType(tycon, addInto(args.head) :: Nil) + case tp @ AppliedType(tycon, args) if defn.isFunctionType(tp) => + wrapConvertible(tp.derivedAppliedType(tycon, args.init :+ addInto(args.last))) + case tp @ RefinedType(parent, rname, rinfo) if defn.isFunctionOrPolyType(tp) => + wrapConvertible(tp.derivedRefinedType(parent, rname, addInto(rinfo))) + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = addInto(tp.resType)) + case ExprType(resType) => + ExprType(addInto(resType)) + case _ => + wrapConvertible(tp) + + def paramInfo(param: Symbol) = var paramType = param.info.annotatedToRepeated - if (param.is(Inline)) paramType = translateInline(paramType) - if (param.is(Erased)) paramType = translateErased(paramType) + if param.is(Inline) then + paramType = addAnnotation(paramType, defn.InlineParamAnnot, param) + if param.is(Erased) then + paramType = addAnnotation(paramType, defn.ErasedParamAnnot, param) + if param.hasAnnotation(defn.AllowConversionsAnnot) then + paramType = addInto(paramType) paramType - } apply(params.map(_.name.asTermName))( 
tl => params.map(p => tl.integrate(params, paramInfo(p))), tl => tl.integrate(params, resultType)) - } + end fromSymbols - final def apply(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type)(using Context): MethodType = + def apply(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type)(using Context): MethodType = checkValid(unique(new CachedMethodType(paramNames)(paramInfosExp, resultTypeExp, self))) def checkValid(mt: MethodType)(using Context): mt.type = { @@ -3986,19 +4056,14 @@ object Types { } object MethodType extends MethodTypeCompanion("MethodType") { - def companion(isContextual: Boolean = false, isImplicit: Boolean = false, isErased: Boolean = false): MethodTypeCompanion = - if (isContextual) - if (isErased) ErasedContextualMethodType else ContextualMethodType - else if (isImplicit) - if (isErased) ErasedImplicitMethodType else ImplicitMethodType - else - if (isErased) ErasedMethodType else MethodType + def companion(isContextual: Boolean = false, isImplicit: Boolean = false): MethodTypeCompanion = + if (isContextual) ContextualMethodType + else if (isImplicit) ImplicitMethodType + else MethodType } - object ErasedMethodType extends MethodTypeCompanion("ErasedMethodType") + object ContextualMethodType extends MethodTypeCompanion("ContextualMethodType") - object ErasedContextualMethodType extends MethodTypeCompanion("ErasedContextualMethodType") object ImplicitMethodType extends MethodTypeCompanion("ImplicitMethodType") - object ErasedImplicitMethodType extends MethodTypeCompanion("ErasedImplicitMethodType") /** A ternary extractor for MethodType */ object MethodTpe { @@ -4239,7 +4304,7 @@ object Types { final val Unknown: DependencyStatus = 0 // not yet computed final val NoDeps: DependencyStatus = 1 // no dependent parameters found final val FalseDeps: DependencyStatus = 2 // all dependent parameters are prefixes of non-depended alias types - final val 
CaptureDeps: DependencyStatus = 3 // dependencies in capture sets under -Ycc, otherwise only false dependencoes + final val CaptureDeps: DependencyStatus = 3 // dependencies in capture sets under captureChecking, otherwise only false dependencoes final val TrueDeps: DependencyStatus = 4 // some truly dependent parameters exist final val StatusMask: DependencyStatus = 7 // the bits indicating actual dependency status final val Provisional: DependencyStatus = 8 // set if dependency status can still change due to type variable instantiations @@ -4742,7 +4807,7 @@ object Types { def hasLowerBound(using Context): Boolean = !currentEntry.loBound.isExactlyNothing /** For uninstantiated type variables: Is the upper bound different from Any? */ - def hasUpperBound(using Context): Boolean = !currentEntry.hiBound.isRef(defn.AnyClass) + def hasUpperBound(using Context): Boolean = !currentEntry.hiBound.finalResultType.isExactlyAny /** Unwrap to instance (if instantiated) or origin (if not), until result * is no longer a TypeVar @@ -5185,6 +5250,10 @@ object Types { else result def emptyPolyKind(using Context): TypeBounds = apply(defn.NothingType, defn.AnyKindType) + /** An interval covering all types of the same kind as `tp`. 
*/ + def emptySameKindAs(tp: Type)(using Context): TypeBounds = + val top = tp.topType + if top.isExactlyAny then empty else apply(defn.NothingType, top) def upper(hi: Type)(using Context): TypeBounds = apply(defn.NothingType, hi) def lower(lo: Type)(using Context): TypeBounds = apply(lo, defn.AnyType) } @@ -5213,7 +5282,10 @@ object Types { override def stripTypeVar(using Context): Type = derivedAnnotatedType(parent.stripTypeVar, annot) - override def stripAnnots(using Context): Type = parent.stripAnnots + override def stripAnnots(keep: Annotation => (Context) ?=> Boolean)(using Context): Type = + val p = parent.stripAnnots(keep) + if keep(annot) then derivedAnnotatedType(p, annot) + else p override def stripped(using Context): Type = parent.stripped @@ -5292,7 +5364,12 @@ object Types { abstract class FlexType extends UncachedGroundType with ValueType abstract class ErrorType extends FlexType { + + /** An explanation of the cause of the failure */ def msg(using Context): Message + + /** An explanation of the cause of the failure as a string */ + def explanation(using Context): String = msg.message } object ErrorType: @@ -5306,10 +5383,10 @@ object Types { def msg(using Context): Message = ctx.base.errorTypeMsg.get(this) match case Some(m) => m - case None => "error message from previous run no longer available" + case None => em"error message from previous run no longer available" object UnspecifiedErrorType extends ErrorType { - override def msg(using Context): Message = "unspecified error" + override def msg(using Context): Message = em"unspecified error" } /* Type used to track Select nodes that could not resolve a member and their qualifier is a scala.Dynamic. */ @@ -5355,6 +5432,9 @@ object Types { else result else unique(CachedWildcardType(bounds)) + /** A wildcard matching any type of the same kind as `tp`. 
*/ + def sameKindAs(tp: Type)(using Context): WildcardType = + apply(TypeBounds.emptySameKindAs(tp)) } /** An extractor for single abstract method types. @@ -5496,6 +5576,14 @@ object Types { stop == StopAt.Static && tp.currentSymbol.isStatic && isStaticPrefix(tp.prefix) || stop == StopAt.Package && tp.currentSymbol.is(Package) } + + /** The type parameters of the constructor of this applied type. + * Overridden in OrderingConstraint's ConstraintAwareTraversal to take account + * of instantiations in the constraint that are not yet propagated to the + * instance types of type variables. + */ + protected def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = + tp.tyconTypeParams end VariantTraversal /** A supertrait for some typemaps that are bijections. Used for capture checking. @@ -5603,17 +5691,11 @@ object Types { case tp: NamedType => if stopBecauseStaticOrLocal(tp) then tp else - val prefix1 = atVariance(variance max 0)(this(tp.prefix)) - // A prefix is never contravariant. Even if say `p.A` is used in a contravariant - // context, we cannot assume contravariance for `p` because `p`'s lower - // bound might not have a binding for `A` (e.g. the lower bound could be `Nothing`). - // By contrast, covariance does translate to the prefix, since we have that - // if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member - // of `p`'s upper bound. 
+ val prefix1 = atVariance(variance max 0)(this(tp.prefix)) // see comment of TypeAccumulator's applyToPrefix derivedSelect(tp, prefix1) case tp: AppliedType => - derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tp.tyconTypeParams)) + derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tyconTypeParams(tp))) case tp: LambdaType => mapOverLambda(tp) @@ -5661,6 +5743,12 @@ object Types { case tp @ SuperType(thistp, supertp) => derivedSuperType(tp, this(thistp), this(supertp)) + case tp @ ConstantType(const @ Constant(_: Type)) => + val classType = const.tpe + val classType1 = this(classType) + if classType eq classType1 then tp + else classType1 + case tp: LazyRef => LazyRef { refCtx => given Context = refCtx @@ -5940,7 +6028,7 @@ object Types { case nil => true } - if (distributeArgs(args, tp.tyconTypeParams)) + if (distributeArgs(args, tyconTypeParams(tp))) range(tp.derivedAppliedType(tycon, loBuf.toList), tp.derivedAppliedType(tycon, hiBuf.toList)) else if tycon.isLambdaSub || args.exists(isRangeOfNonTermTypes) then @@ -6024,8 +6112,11 @@ object Types { tp.derivedLambdaType(tp.paramNames, formals, restpe) } + /** Overridden in TypeOps.avoid */ + protected def needsRangeIfInvariant(refs: CaptureSet): Boolean = true + override def mapCapturingType(tp: Type, parent: Type, refs: CaptureSet, v: Int): Type = - if v == 0 then + if v == 0 && needsRangeIfInvariant(refs) then range(mapCapturingType(tp, parent, refs, -1), mapCapturingType(tp, parent, refs, 1)) else super.mapCapturingType(tp, parent, refs, v) @@ -6036,14 +6127,10 @@ object Types { /** A range of possible types between lower bound `lo` and upper bound `hi`. * Only used internally in `ApproximatingTypeMap`. */ - case class Range(lo: Type, hi: Type) extends UncachedGroundType { + case class Range(lo: Type, hi: Type) extends UncachedGroundType: assert(!lo.isInstanceOf[Range]) assert(!hi.isInstanceOf[Range]) - override def toText(printer: Printer): Text = - lo.toText(printer) ~ ".." 
~ hi.toText(printer) - } - /** Approximate wildcards by their bounds */ class AvoidWildcardsMap(using Context) extends ApproximatingTypeMap: protected def mapWild(t: WildcardType) = @@ -6062,8 +6149,17 @@ object Types { protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations - protected final def applyToPrefix(x: T, tp: NamedType): T = - atVariance(variance max 0)(this(x, tp.prefix)) // see remark on NamedType case in TypeMap + /** A prefix is never contravariant. Even if say `p.A` is used in a contravariant + * context, we cannot assume contravariance for `p` because `p`'s lower + * bound might not have a binding for `A`, since the lower bound could be `Nothing`. + * By contrast, covariance does translate to the prefix, since we have that + * if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member + * of `p`'s upper bound. + * Overridden in OrderingConstraint's ConstraintAwareTraversal, where a + * more relaxed scheme is used. + */ + protected def applyToPrefix(x: T, tp: NamedType): T = + atVariance(variance max 0)(this(x, tp.prefix)) def foldOver(x: T, tp: Type): T = { record(s"foldOver $getClass") @@ -6086,7 +6182,7 @@ object Types { } foldArgs(acc, tparams.tail, args.tail) } - foldArgs(this(x, tycon), tp.tyconTypeParams, args) + foldArgs(this(x, tycon), tyconTypeParams(tp), args) case _: BoundType | _: ThisType => x diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala index 3b05ee351b86..4aa60d973264 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala @@ -346,6 +346,7 @@ object ClassfileConstants { case JAVA_ACC_ENUM => Enum case JAVA_ACC_ABSTRACT => if (isClass) Abstract else Deferred case JAVA_ACC_INTERFACE => PureInterfaceCreationFlags | JavaDefined + case JAVA_ACC_ANNOTATION => JavaAnnotation 
case _ => EmptyFlags } @@ -353,18 +354,16 @@ object ClassfileConstants { if (jflag == 0) base else base | translateFlag(jflag) private def translateFlags(jflags: Int, baseFlags: FlagSet): FlagSet = { - val nflags = - if ((jflags & JAVA_ACC_ANNOTATION) == 0) jflags - else jflags & ~(JAVA_ACC_ABSTRACT | JAVA_ACC_INTERFACE) // annotations are neither abstract nor interfaces var res: FlagSet = baseFlags | JavaDefined - res = addFlag(res, nflags & JAVA_ACC_PRIVATE) - res = addFlag(res, nflags & JAVA_ACC_PROTECTED) - res = addFlag(res, nflags & JAVA_ACC_FINAL) - res = addFlag(res, nflags & JAVA_ACC_SYNTHETIC) - res = addFlag(res, nflags & JAVA_ACC_STATIC) - res = addFlag(res, nflags & JAVA_ACC_ENUM) - res = addFlag(res, nflags & JAVA_ACC_ABSTRACT) - res = addFlag(res, nflags & JAVA_ACC_INTERFACE) + res = addFlag(res, jflags & JAVA_ACC_PRIVATE) + res = addFlag(res, jflags & JAVA_ACC_PROTECTED) + res = addFlag(res, jflags & JAVA_ACC_FINAL) + res = addFlag(res, jflags & JAVA_ACC_SYNTHETIC) + res = addFlag(res, jflags & JAVA_ACC_STATIC) + res = addFlag(res, jflags & JAVA_ACC_ENUM) + res = addFlag(res, jflags & JAVA_ACC_ABSTRACT) + res = addFlag(res, jflags & JAVA_ACC_INTERFACE) + res = addFlag(res, jflags & JAVA_ACC_ANNOTATION) res } diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 0b5fda49d63c..eeeb3767bd34 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -165,11 +165,7 @@ class ClassfileParser( * Updates the read pointer of 'in'. 
*/ def parseParents: List[Type] = { val superType = - if (isAnnotation) { - in.nextChar - defn.AnnotationClass.typeRef - } - else if (classRoot.symbol == defn.ComparableClass || + if (classRoot.symbol == defn.ComparableClass || classRoot.symbol == defn.JavaCloneableClass || classRoot.symbol == defn.JavaSerializableClass) { // Treat these interfaces as universal traits @@ -186,7 +182,6 @@ class ClassfileParser( // Consequently, no best implicit for the "Integral" evidence parameter of "range" // is found. Previously, this worked because of weak conformance, which has been dropped. - if (isAnnotation) ifaces = defn.ClassfileAnnotationClass.typeRef :: ifaces superType :: ifaces } @@ -275,6 +270,9 @@ class ClassfileParser( def complete(denot: SymDenotation)(using Context): Unit = { val sym = denot.symbol val isEnum = (jflags & JAVA_ACC_ENUM) != 0 + val isNative = (jflags & JAVA_ACC_NATIVE) != 0 + val isTransient = (jflags & JAVA_ACC_TRANSIENT) != 0 + val isVolatile = (jflags & JAVA_ACC_VOLATILE) != 0 val isConstructor = name eq nme.CONSTRUCTOR /** Strip leading outer param from constructor and trailing access tag for @@ -313,6 +311,12 @@ class ClassfileParser( val isVarargs = denot.is(Flags.Method) && (jflags & JAVA_ACC_VARARGS) != 0 denot.info = sigToType(sig, isVarargs = isVarargs) if (isConstructor) normalizeConstructorParams() + if isNative then + attrCompleter.annotations ::= Annotation.deferredSymAndTree(defn.NativeAnnot)(New(defn.NativeAnnot.typeRef, Nil)) + if isTransient then + attrCompleter.annotations ::= Annotation.deferredSymAndTree(defn.TransientAnnot)(New(defn.TransientAnnot.typeRef, Nil)) + if isVolatile then + attrCompleter.annotations ::= Annotation.deferredSymAndTree(defn.VolatileAnnot)(New(defn.VolatileAnnot.typeRef, Nil)) denot.info = translateTempPoly(attrCompleter.complete(denot.info, isVarargs)) if (isConstructor) normalizeConstructorInfo() @@ -322,7 +326,7 @@ class ClassfileParser( if (isEnum) { val enumClass = sym.owner.linkedClass if 
(!enumClass.exists) - report.warning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.") + report.warning(em"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.") else { if (!enumClass.is(Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed) enumClass.addAnnotation(Annotation.Child(sym, NoSpan)) @@ -652,7 +656,7 @@ class ClassfileParser( case tp: TypeRef if tp.denot.infoOrCompleter.isInstanceOf[StubInfo] => // Silently ignore missing annotation classes like javac if ctx.debug then - report.warning(i"Error while parsing annotations in ${classfile}: annotation class $tp not present on classpath") + report.warning(em"Error while parsing annotations in ${classfile}: annotation class $tp not present on classpath") None case _ => if (hasError || skip) None @@ -667,7 +671,7 @@ class ClassfileParser( // the classpath would *not* end up here. A class not found is signaled // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), // and that should never be swallowed silently. 
- report.warning("Caught: " + ex + " while parsing annotations in " + classfile) + report.warning(em"Caught: $ex while parsing annotations in $classfile") if (ctx.debug) ex.printStackTrace() None // ignore malformed annotations @@ -749,7 +753,7 @@ class ClassfileParser( case tpnme.ConstantValueATTR => val c = pool.getConstant(in.nextChar) if (c ne null) res.constant = c - else report.warning(s"Invalid constant in attribute of ${sym.showLocated} while parsing ${classfile}") + else report.warning(em"Invalid constant in attribute of ${sym.showLocated} while parsing ${classfile}") case tpnme.MethodParametersATTR => val paramCount = in.nextByte @@ -760,7 +764,7 @@ class ClassfileParser( res.namedParams += (i -> name.name) case tpnme.AnnotationDefaultATTR => - sym.addAnnotation(Annotation(defn.AnnotationDefaultAnnot, Nil)) + sym.addAnnotation(Annotation(defn.AnnotationDefaultAnnot, Nil, sym.span)) // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME case tpnme.RuntimeVisibleAnnotationATTR @@ -836,7 +840,7 @@ class ClassfileParser( class AnnotConstructorCompleter(classInfo: TempClassInfoType) extends LazyType { def complete(denot: SymDenotation)(using Context): Unit = { - val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol) + val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol && sym.name != nme.CONSTRUCTOR) val paramNames = attrs.map(_.name.asTermName) val paramTypes = attrs.map(_.info.resultType) denot.info = MethodType(paramNames, paramTypes, classRoot.typeRef) @@ -963,7 +967,7 @@ class ClassfileParser( } } else { - report.error(s"Could not find $path in ${classfile.underlyingSource}") + report.error(em"Could not find $path in ${classfile.underlyingSource}") Array.empty } case _ => @@ -971,7 +975,7 @@ class ClassfileParser( val name = classfile.name.stripSuffix(".class") + ".tasty" val tastyFileOrNull = dir.lookupName(name, false) if (tastyFileOrNull == null) { - 
report.error(s"Could not find TASTY file $name under $dir") + report.error(em"Could not find TASTY file $name under $dir") Array.empty } else tastyFileOrNull.toByteArray @@ -1082,10 +1086,10 @@ class ClassfileParser( if (sym == classRoot.symbol) staticScope.lookup(name) else { - var module = sym.companionModule - if (!module.exists && sym.isAbsent()) - module = sym.scalacLinkedClass - module.info.member(name).symbol + var moduleClass = sym.registeredCompanion + if (!moduleClass.exists && sym.isAbsent()) + moduleClass = sym.scalacLinkedClass + moduleClass.info.member(name).symbol } else if (sym == classRoot.symbol) instanceScope.lookup(name) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala index df3e4df497f8..fde6c669045d 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala @@ -9,36 +9,43 @@ import dotty.tools.tasty.TastyFormat.CommentsSection import java.nio.charset.StandardCharsets -class CommentPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Addr, docString: untpd.MemberDef => Option[Comment]): - private val buf = new TastyBuffer(5000) - pickler.newSection(CommentsSection, buf) - - def pickleComment(root: tpd.Tree): Unit = traverse(root) - - private def pickleComment(addr: Addr, comment: Comment): Unit = - if addr != NoAddr then - val bytes = comment.raw.getBytes(StandardCharsets.UTF_8).nn - val length = bytes.length - buf.writeAddr(addr) - buf.writeNat(length) - buf.writeBytes(bytes, length) - buf.writeLongInt(comment.span.coords) - - private def traverse(x: Any): Unit = x match - case x: untpd.Tree @unchecked => - x match - case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d. 
- for comment <- docString(x) do pickleComment(addrOfTree(x), comment) - case _ => - val limit = x.productArity - var n = 0 - while n < limit do - traverse(x.productElement(n)) - n += 1 - case y :: ys => - traverse(y) - traverse(ys) - case _ => - +object CommentPickler: + + def pickleComments( + pickler: TastyPickler, + addrOfTree: PositionPickler.TreeToAddr, + docString: untpd.MemberDef => Option[Comment], + root: tpd.Tree, + buf: TastyBuffer = new TastyBuffer(5000)): Unit = + + pickler.newSection(CommentsSection, buf) + + def pickleComment(addr: Addr, comment: Comment): Unit = + if addr != NoAddr then + val bytes = comment.raw.getBytes(StandardCharsets.UTF_8).nn + val length = bytes.length + buf.writeAddr(addr) + buf.writeNat(length) + buf.writeBytes(bytes, length) + buf.writeLongInt(comment.span.coords) + + def traverse(x: Any): Unit = x match + case x: untpd.Tree @unchecked => + x match + case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d. + for comment <- docString(x) do pickleComment(addrOfTree(x), comment) + case _ => + val limit = x.productArity + var n = 0 + while n < limit do + traverse(x.productElement(n)) + n += 1 + case y :: ys => + traverse(y) + traverse(ys) + case _ => + + traverse(root) + end pickleComments end CommentPickler diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala index 623508780325..1ddcf9afe1dc 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala @@ -49,9 +49,12 @@ class NameBuffer extends TastyBuffer(10000) { } } - private def withLength(op: => Unit, lengthWidth: Int = 1): Unit = { + private inline def withLength(inline op: Unit, lengthWidth: Int = 1): Unit = { val lengthAddr = currentAddr - for (i <- 0 until lengthWidth) writeByte(0) + var i = 0 + while i < lengthWidth do + writeByte(0) + i += 1 op val length = currentAddr.index - 
lengthAddr.index - lengthWidth putNat(lengthAddr, length, lengthWidth) @@ -111,11 +114,11 @@ class NameBuffer extends TastyBuffer(10000) { override def assemble(): Unit = { var i = 0 - for ((name, ref) <- nameRefs) { + for (name, ref) <- nameRefs do + val ref = nameRefs(name) assert(ref.index == i) i += 1 pickleNameContents(name) - } } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala index ad0c051e1b7b..924b87bec003 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala @@ -8,32 +8,40 @@ import dotty.tools.tasty.TastyBuffer import TastyBuffer._ import ast._ -import Trees.WithLazyField +import Trees.WithLazyFields import util.{SourceFile, NoSource} import core._ import Annotations._, Decorators._ import collection.mutable import util.Spans._ +import reporting.Message -class PositionPickler( - pickler: TastyPickler, - addrOfTree: PositionPickler.TreeToAddr, - treeAnnots: untpd.MemberDef => List[tpd.Tree], - relativePathReference: String){ - +object PositionPickler: import ast.tpd._ - val buf: TastyBuffer = new TastyBuffer(5000) - pickler.newSection(PositionsSection, buf) - - private val pickledIndices = new mutable.BitSet + // Note: This could be just TreeToAddr => Addr if functions are specialized to value classes. 
+ // We use a SAM type to avoid boxing of Addr + @FunctionalInterface + trait TreeToAddr: + def apply(x: untpd.Tree): Addr - def header(addrDelta: Int, hasStartDelta: Boolean, hasEndDelta: Boolean, hasPoint: Boolean): Int = { + def header(addrDelta: Int, hasStartDelta: Boolean, hasEndDelta: Boolean, hasPoint: Boolean): Int = def toInt(b: Boolean) = if (b) 1 else 0 (addrDelta << 3) | (toInt(hasStartDelta) << 2) | (toInt(hasEndDelta) << 1) | toInt(hasPoint) - } - def picklePositions(source: SourceFile, roots: List[Tree], warnings: mutable.ListBuffer[String]): Unit = { + def picklePositions( + pickler: TastyPickler, + addrOfTree: TreeToAddr, + treeAnnots: untpd.MemberDef => List[tpd.Tree], + relativePathReference: String, + source: SourceFile, + roots: List[Tree], + warnings: mutable.ListBuffer[Message], + buf: TastyBuffer = new TastyBuffer(5000), + pickledIndices: mutable.BitSet = new mutable.BitSet) = + + pickler.newSection(PositionsSection, buf) + /** Pickle the number of lines followed by the length of each line */ def pickleLineOffsets(): Unit = { val content = source.content() @@ -79,7 +87,7 @@ class PositionPickler( def alwaysNeedsPos(x: Positioned) = x match { case // initialSpan is inaccurate for trees with lazy field - _: WithLazyField[?] + _: WithLazyFields // A symbol is created before the corresponding tree is unpickled, // and its position cannot be changed afterwards. @@ -128,10 +136,6 @@ class PositionPickler( } for (root <- roots) traverse(root, NoSource) - } -} -object PositionPickler: - // Note: This could be just TreeToAddr => Addr if functions are specialized to value classes. 
- // We use a SAM type to avoid boxing of Addr - @FunctionalInterface trait TreeToAddr: - def apply(x: untpd.Tree): Addr + end picklePositions +end PositionPickler + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala b/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala new file mode 100644 index 000000000000..b36c78a77ac6 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala @@ -0,0 +1,20 @@ +package dotty.tools.dotc.core.tasty +import dotty.tools.tasty.TastyBuffer +import collection.mutable +import java.util.Arrays + +class ScratchData: + var delta, delta1 = new Array[Int](0) + + val positionBuffer = new TastyBuffer(5000) + val pickledIndices = new mutable.BitSet + + val commentBuffer = new TastyBuffer(5000) + + def reset() = + assert(delta ne delta1) + assert(delta.length == delta1.length) + positionBuffer.reset() + pickledIndices.clear() + commentBuffer.reset() + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala index aa657c393815..4f1e84ac9184 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala @@ -38,8 +38,9 @@ class TastyPickler(val rootCls: ClassSymbol) { nameBuffer.assemble() sections.foreach(_._2.assemble()) - val nameBufferHash = TastyHash.pjwHash64(nameBuffer.bytes) - val treeSectionHash +: otherSectionHashes = sections.map(x => TastyHash.pjwHash64(x._2.bytes)): @unchecked + val nameBufferHash = TastyHash.pjwHash64(nameBuffer.bytes, nameBuffer.length) + val treeSectionHash +: otherSectionHashes = + sections.map(x => TastyHash.pjwHash64(x._2.bytes, x._2.length)): @unchecked // Hash of name table and tree val uuidLow: Long = nameBufferHash ^ treeSectionHash diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala index a3dedaaec685..d0f08379c114 100644 --- 
a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala @@ -10,6 +10,7 @@ import TastyBuffer.{Addr, NoAddr, AddrWidth} import util.Util.bestFit import config.Printers.pickling import ast.untpd.Tree +import java.util.Arrays class TreeBuffer extends TastyBuffer(50000) { @@ -17,7 +18,6 @@ class TreeBuffer extends TastyBuffer(50000) { private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets) private var offsets = new Array[Int](initialOffsetSize) private var isRelative = new Array[Boolean](initialOffsetSize) - private var delta: Array[Int] = _ private var numOffsets = 0 /** A map from trees to the address at which a tree is pickled. */ @@ -68,109 +68,119 @@ class TreeBuffer extends TastyBuffer(50000) { } /** The amount by which the bytes at the given address are shifted under compression */ - def deltaAt(at: Addr): Int = { + def deltaAt(at: Addr, scratch: ScratchData): Int = { val idx = bestFit(offsets, numOffsets, at.index - 1) - if (idx < 0) 0 else delta(idx) + if (idx < 0) 0 else scratch.delta(idx) } /** The address to which `x` is translated under compression */ - def adjusted(x: Addr): Addr = x - deltaAt(x) + def adjusted(x: Addr, scratch: ScratchData): Addr = x - deltaAt(x, scratch) - /** Compute all shift-deltas */ - private def computeDeltas() = { - delta = new Array[Int](numOffsets) - var lastDelta = 0 - var i = 0 - while (i < numOffsets) { - val off = offset(i) - val skippedOff = skipZeroes(off) - val skippedCount = skippedOff.index - off.index - assert(skippedCount < AddrWidth, s"unset field at position $off") - lastDelta += skippedCount - delta(i) = lastDelta - i += 1 - } - } + /** Final assembly, involving the following steps: + * - compute deltas + * - adjust deltas until additional savings are < 1% of total + * - adjust offsets according to the adjusted deltas + * - shrink buffer, skipping zeroes. 
+ */ + def compactify(scratch: ScratchData): Unit = - /** The absolute or relative adjusted address at index `i` of `offsets` array*/ - private def adjustedOffset(i: Int): Addr = { - val at = offset(i) - val original = getAddr(at) - if (isRelative(i)) { - val start = skipNat(at) - val len1 = original + delta(i) - deltaAt(original + start.index) - val len2 = adjusted(original + start.index) - adjusted(start).index - assert(len1 == len2, - s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2") - len1 + def reserve(arr: Array[Int]) = + if arr.length < numOffsets then + new Array[Int](numOffsets) + else + Arrays.fill(arr, 0, numOffsets, 0) + arr + + /** Compute all shift-deltas */ + def computeDeltas() = { + scratch.delta = reserve(scratch.delta) + var lastDelta = 0 + var i = 0 + while (i < numOffsets) { + val off = offset(i) + val skippedOff = skipZeroes(off) + val skippedCount = skippedOff.index - off.index + assert(skippedCount < AddrWidth, s"unset field at position $off") + lastDelta += skippedCount + scratch.delta(i) = lastDelta + i += 1 + } } - else adjusted(original) - } - /** Adjust all offsets according to previously computed deltas */ - private def adjustOffsets(): Unit = - for (i <- 0 until numOffsets) { - val corrected = adjustedOffset(i) - fillAddr(offset(i), corrected) + /** The absolute or relative adjusted address at index `i` of `offsets` array*/ + def adjustedOffset(i: Int): Addr = { + val at = offset(i) + val original = getAddr(at) + if (isRelative(i)) { + val start = skipNat(at) + val len1 = original + scratch.delta(i) - deltaAt(original + start.index, scratch) + val len2 = adjusted(original + start.index, scratch) - adjusted(start, scratch).index + assert(len1 == len2, + s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2") + len1 + } + else adjusted(original, scratch) } - /** Adjust deltas to also take account references that will shrink (and thereby - * generate additional zeroes that can be 
skipped) due to previously - * computed adjustments. - */ - private def adjustDeltas(): Int = { - val delta1 = new Array[Int](delta.length) - var lastDelta = 0 - var i = 0 - while (i < numOffsets) { - val corrected = adjustedOffset(i) - lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index) - delta1(i) = lastDelta - i += 1 + /** Adjust all offsets according to previously computed deltas */ + def adjustOffsets(): Unit = + var i = 0 + while i < numOffsets do + val corrected = adjustedOffset(i) + fillAddr(offset(i), corrected) + i += 1 + + /** Adjust deltas to also take account references that will shrink (and thereby + * generate additional zeroes that can be skipped) due to previously + * computed adjustments. + */ + def adjustDeltas(): Int = { + scratch.delta1 = reserve(scratch.delta1) + var lastDelta = 0 + var i = 0 + while i < numOffsets do + val corrected = adjustedOffset(i) + lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index) + scratch.delta1(i) = lastDelta + i += 1 + val saved = + if (numOffsets == 0) 0 + else scratch.delta1(numOffsets - 1) - scratch.delta(numOffsets - 1) + val tmp = scratch.delta + scratch.delta = scratch.delta1 + scratch.delta1 = tmp + saved } - val saved = - if (numOffsets == 0) 0 - else delta1(numOffsets - 1) - delta(numOffsets - 1) - delta = delta1 - saved - } - /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */ - private def compress(): Int = { - var lastDelta = 0 - var start = 0 - var i = 0 - var wasted = 0 - def shift(end: Int) = - System.arraycopy(bytes, start, bytes, start - lastDelta, end - start) - while (i < numOffsets) { - val next = offsets(i) - shift(next) - start = next + delta(i) - lastDelta - val pastZeroes = skipZeroes(Addr(next)).index - assert(pastZeroes >= start, s"something's wrong: eliminated non-zero") - wasted += (pastZeroes - start) - lastDelta = delta(i) - i += 1 + /** Compress pickle buffer, shifting bytes to close all skipped zeroes. 
*/ + def compress(): Int = { + var lastDelta = 0 + var start = 0 + var i = 0 + var wasted = 0 + def shift(end: Int) = + System.arraycopy(bytes, start, bytes, start - lastDelta, end - start) + while (i < numOffsets) { + val next = offsets(i) + shift(next) + start = next + scratch.delta(i) - lastDelta + val pastZeroes = skipZeroes(Addr(next)).index + assert(pastZeroes >= start, s"something's wrong: eliminated non-zero") + wasted += (pastZeroes - start) + lastDelta = scratch.delta(i) + i += 1 + } + shift(length) + length -= lastDelta + wasted } - shift(length) - length -= lastDelta - wasted - } - def adjustTreeAddrs(): Unit = - var i = 0 - while i < treeAddrs.size do - treeAddrs.setValue(i, adjusted(Addr(treeAddrs.value(i))).index) - i += 1 + def adjustTreeAddrs(): Unit = + var i = 0 + while i < treeAddrs.size do + treeAddrs.setValue(i, adjusted(Addr(treeAddrs.value(i)), scratch).index) + i += 1 - /** Final assembly, involving the following steps: - * - compute deltas - * - adjust deltas until additional savings are < 1% of total - * - adjust offsets according to the adjusted deltas - * - shrink buffer, skipping zeroes. - */ - def compactify(): Unit = { val origLength = length computeDeltas() //println(s"offsets: ${offsets.take(numOffsets).deep}") @@ -185,5 +195,5 @@ class TreeBuffer extends TastyBuffer(50000) { adjustTreeAddrs() val wasted = compress() pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now. 
- } + end compactify } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 475a258e8330..8a396921f32b 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -20,6 +20,8 @@ import collection.mutable import reporting.{Profile, NoProfile} import dotty.tools.tasty.TastyFormat.ASTsSection +object TreePickler: + class StackSizeExceeded(val mdef: tpd.MemberDef) extends Exception class TreePickler(pickler: TastyPickler) { val buf: TreeBuffer = new TreeBuffer @@ -27,6 +29,7 @@ class TreePickler(pickler: TastyPickler) { import buf._ import pickler.nameBuffer.nameIndex import tpd._ + import TreePickler.* private val symRefs = Symbols.MutableSymbolMap[Addr](256) private val forwardSymRefs = Symbols.MutableSymbolMap[List[Addr]]() @@ -53,7 +56,7 @@ class TreePickler(pickler: TastyPickler) { def docString(tree: untpd.MemberDef): Option[Comment] = Option(docStrings.lookup(tree)) - private def withLength(op: => Unit) = { + private inline def withLength(inline op: Unit) = { val lengthAddr = reserveRef(relative = true) op fillRef(lengthAddr, currentAddr, relative = true) @@ -68,15 +71,12 @@ class TreePickler(pickler: TastyPickler) { case _ => } - def registerDef(sym: Symbol): Unit = { + def registerDef(sym: Symbol): Unit = symRefs(sym) = currentAddr - forwardSymRefs.get(sym) match { - case Some(refs) => - refs.foreach(fillRef(_, currentAddr, relative = false)) - forwardSymRefs -= sym - case None => - } - } + val refs = forwardSymRefs.lookup(sym) + if refs != null then + refs.foreach(fillRef(_, currentAddr, relative = false)) + forwardSymRefs -= sym def pickleName(name: Name): Unit = writeNat(nameIndex(name).index) @@ -85,17 +85,19 @@ class TreePickler(pickler: TastyPickler) { if (sig eq Signature.NotAMethod) name else SignedName(name.toTermName, sig, target.asTermName)) - private def pickleSymRef(sym: Symbol)(using 
Context) = symRefs.get(sym) match { - case Some(label) => - if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym) - case None => + private def pickleSymRef(sym: Symbol)(using Context) = + val label: Addr | Null = symRefs.lookup(sym) + if label == null then // See pos/t1957.scala for an example where this can happen. // I believe it's a bug in typer: the type of an implicit argument refers // to a closure parameter outside the closure itself. TODO: track this down, so that we // can eliminate this case. report.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.srcPos) pickleForwardSymRef(sym) - } + else if label == NoAddr then + pickleForwardSymRef(sym) + else + writeRef(label.uncheckedNN) // !!! Dotty problem: Not clear why nn or uncheckedNN is needed here private def pickleForwardSymRef(sym: Symbol)(using Context) = { val ref = reserveRef(relative = false) @@ -207,7 +209,7 @@ class TreePickler(pickler: TastyPickler) { else if (tpe.prefix == NoPrefix) { writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) if Config.checkLevelsOnConstraints && !symRefs.contains(sym) && !sym.isPatternBound && !sym.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) then - report.error(i"pickling reference to as yet undefined $tpe with symbol ${sym}", sym.srcPos) + report.error(em"pickling reference to as yet undefined $tpe with symbol ${sym}", sym.srcPos) pickleSymRef(sym) } else tpe.designator match { @@ -285,7 +287,6 @@ class TreePickler(pickler: TastyPickler) { var mods = EmptyFlags if tpe.isContextualMethod then mods |= Given else if tpe.isImplicitMethod then mods |= Implicit - if tpe.isErasedMethod then mods |= Erased pickleMethodic(METHODtype, tpe, mods) case tpe: ParamRef => assert(pickleParamRef(tpe), s"orphan parameter reference: $tpe") @@ -328,23 +329,30 @@ class TreePickler(pickler: TastyPickler) { registerDef(sym) writeByte(tag) val addr = currentAddr - withLength { - pickleName(sym.name) - pickleParams - tpt match { - case 
_: Template | _: Hole => pickleTree(tpt) - case _ if tpt.isType => pickleTpt(tpt) + try + withLength { + pickleName(sym.name) + pickleParams + tpt match { + case _: Template | _: Hole => pickleTree(tpt) + case _ if tpt.isType => pickleTpt(tpt) + } + pickleTreeUnlessEmpty(rhs) + pickleModifiers(sym, mdef) } - pickleTreeUnlessEmpty(rhs) - pickleModifiers(sym, mdef) - } + catch + case ex: Throwable => + if !ctx.settings.YnoDecodeStacktraces.value + && handleRecursive.underlyingStackOverflowOrNull(ex) != null then + throw StackSizeExceeded(mdef) + else + throw ex if sym.is(Method) && sym.owner.isClass then profile.recordMethodSize(sym, currentAddr.index - addr.index, mdef.span) - for - docCtx <- ctx.docCtx - comment <- docCtx.docstring(sym) - do - docStrings(mdef) = comment + for docCtx <- ctx.docCtx do + val comment = docCtx.docstrings.lookup(sym) + if comment != null then + docStrings(mdef) = comment } def pickleParam(tree: Tree)(using Context): Unit = { @@ -426,6 +434,13 @@ class TreePickler(pickler: TastyPickler) { writeByte(THROW) pickleTree(args.head) } + else if fun.symbol.originalSignaturePolymorphic.exists then + writeByte(APPLYsigpoly) + withLength { + pickleTree(fun) + pickleType(fun.tpe.widenTermRefExpr, richTypes = true) // this widens to a MethodType, so need richTypes + args.foreach(pickleTree) + } else { writeByte(APPLY) withLength { @@ -451,6 +466,7 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleTree(qual); if (!mix.isEmpty) { + // mixinType being a TypeRef when mix is non-empty is enforced by TreeChecker#checkSuper val SuperType(_, mixinType: TypeRef) = tree.tpe: @unchecked pickleTree(mix.withType(mixinType)) } @@ -776,18 +792,39 @@ class TreePickler(pickler: TastyPickler) { def pickle(trees: List[Tree])(using Context): Unit = { profile = Profile.current - trees.foreach(tree => if (!tree.isEmpty) pickleTree(tree)) + for tree <- trees do + try + if !tree.isEmpty then pickleTree(tree) + catch case ex: StackSizeExceeded => + 
report.error( + em"""Recursion limit exceeded while pickling ${ex.mdef} + |in ${ex.mdef.symbol.showLocated}. + |You could try to increase the stacksize using the -Xss JVM option. + |For the unprocessed stack trace, compile with -Yno-decode-stacktraces.""", + ex.mdef.srcPos) + def missing = forwardSymRefs.keysIterator .map(sym => i"${sym.showLocated} (line ${sym.srcPos.line}) #${sym.id}") .toList assert(forwardSymRefs.isEmpty, i"unresolved symbols: $missing%, % when pickling ${ctx.source}") } - def compactify(): Unit = { - buf.compactify() + def compactify(scratch: ScratchData = new ScratchData): Unit = { + buf.compactify(scratch) def updateMapWithDeltas(mp: MutableSymbolMap[Addr]) = - for (key <- mp.keysIterator.toBuffer[Symbol]) mp(key) = adjusted(mp(key)) + val keys = new Array[Symbol](mp.size) + val it = mp.keysIterator + var i = 0 + while i < keys.length do + keys(i) = it.next + i += 1 + assert(!it.hasNext) + i = 0 + while i < keys.length do + val key = keys(i) + mp(key) = adjusted(mp(key), scratch) + i += 1 updateMapWithDeltas(symRefs) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 6887937ed6fe..9078a8959112 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -32,7 +32,7 @@ import ast.{Trees, tpd, untpd} import Trees._ import Decorators._ import transform.SymUtils._ -import cc.adaptFunctionTypeUnderCC +import cc.{adaptFunctionTypeUnderPureFuns, adaptByNameArgUnderPureFuns} import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer._ @@ -74,6 +74,9 @@ class TreeUnpickler(reader: TastyReader, */ private val typeAtAddr = new mutable.HashMap[Addr, Type] + /** If this is a pickled quote, the owner of the quote, otherwise NoSymbol. 
*/ + private var rootOwner: Symbol = NoSymbol + /** The root symbol denotation which are defined by the Tasty file associated with this * TreeUnpickler. Set by `enterTopLevel`. */ @@ -87,8 +90,8 @@ class TreeUnpickler(reader: TastyReader, /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */ private var ownerTree: OwnerTree = _ - /** Was unpickled class compiled with -Ycc? */ - private var wasCaptureChecked: Boolean = false + /** Was unpickled class compiled with pureFunctions? */ + private var knowsPureFuns: Boolean = false private def registerSym(addr: Addr, sym: Symbol) = symAtAddr(addr) = sym @@ -106,6 +109,7 @@ class TreeUnpickler(reader: TastyReader, /** The unpickled trees */ def unpickle(mode: UnpickleMode)(using Context): List[Tree] = { + if mode != UnpickleMode.TopLevel then rootOwner = ctx.owner assert(roots != null, "unpickle without previous enterTopLevel") val rdr = new TreeReader(reader) mode match { @@ -245,7 +249,6 @@ class TreeUnpickler(reader: TastyReader, while currentAddr != end do // avoid boxing the mods readByte() match case IMPLICIT => mods |= Implicit - case ERASED => mods |= Erased case GIVEN => mods |= Given (names, mods) @@ -402,9 +405,7 @@ class TreeUnpickler(reader: TastyReader, case METHODtype => def methodTypeCompanion(mods: FlagSet): MethodTypeCompanion = if mods.is(Implicit) then ImplicitMethodType - else if mods.isAllOf(Erased | Given) then ErasedContextualMethodType else if mods.is(Given) then ContextualMethodType - else if mods.is(Erased) then ErasedMethodType else MethodType readMethodic(methodTypeCompanion, _.toTermName) case TYPELAMBDAtype => @@ -455,7 +456,8 @@ class TreeUnpickler(reader: TastyReader, val ref = readAddr() typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) case BYNAMEtype => - ExprType(readType()) + val arg = readType() + ExprType(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns) case _ => ConstantType(readConstant(tag)) } @@ -489,11 +491,11 @@ class 
TreeUnpickler(reader: TastyReader, def readTermRef()(using Context): TermRef = readType().asInstanceOf[TermRef] - /** Under -Ycc, map all function types to impure function types, - * unless the unpickled class was also compiled with -Ycc. + /** Under pureFunctions, map all function types to impure function types, + * unless the unpickled class was also compiled with pureFunctions. */ private def postProcessFunction(tp: Type)(using Context): Type = - if wasCaptureChecked then tp else tp.adaptFunctionTypeUnderCC + if knowsPureFuns then tp else tp.adaptFunctionTypeUnderPureFuns // ------ Reading definitions ----------------------------------------------------- @@ -624,7 +626,9 @@ class TreeUnpickler(reader: TastyReader, else newSymbol(ctx.owner, name, flags, completer, privateWithin, coord) } - val annots = annotFns.map(_(sym.owner)) + val annotOwner = + if sym.owner.isClass then newLocalDummy(sym.owner) else sym.owner + val annots = annotFns.map(_(annotOwner)) sym.annotations = annots if sym.isOpaqueAlias then sym.setFlag(Deferred) val isScala2MacroDefinedInScala3 = flags.is(Macro, butNot = Inline) && flags.is(Erased) @@ -642,8 +646,8 @@ class TreeUnpickler(reader: TastyReader, } registerSym(start, sym) if (isClass) { - if sym.owner.is(Package) && annots.exists(_.symbol == defn.CaptureCheckedAnnot) then - wasCaptureChecked = true + if sym.owner.is(Package) && annots.exists(_.hasSymbol(defn.WithPureFunsAnnot)) then + knowsPureFuns = true sym.completer.withDecls(newScope) forkAt(templateStart).indexTemplateParams()(using localContext(sym)) } @@ -737,7 +741,15 @@ class TreeUnpickler(reader: TastyReader, val tp = readType() val lazyAnnotTree = readLaterWithOwner(end, _.readTerm()) owner => - Annotation.deferredSymAndTree(tp.typeSymbol)(lazyAnnotTree(owner).complete) + new DeferredSymAndTree(tp.typeSymbol, lazyAnnotTree(owner).complete): + // Only force computation of symbol if it has the right name. 
This added + // amount of laziness is sometimes necessary to avoid cycles. Test case pos/i15980. + override def hasSymbol(sym: Symbol)(using Context) = tp match + case tp: TypeRef => + tp.designator match + case name: Name => name == sym.name && tp.symbol == sym + case _ => tp.symbol == sym + case _ => this.symbol == sym /** Create symbols for the definitions in the statement sequence between * current address and `end`. @@ -948,6 +960,51 @@ class TreeUnpickler(reader: TastyReader, tree.setDefTree } + /** Read enough of parent to determine its type, without reading arguments + * of applications. This is necessary to make TreeUnpickler as lazy as Namer + * in this regard. See i16673 for a test case. + */ + private def readParentType()(using Context): Type = + readByte() match + case TYPEAPPLY => + val end = readEnd() + val tycon = readParentType() + if tycon.typeParams.isEmpty then + goto(end) + tycon + else + val args = until(end)(readTpt()) + val cls = tycon.classSymbol + assert(cls.typeParams.hasSameLengthAs(args)) + cls.typeRef.appliedTo(args.tpes) + case APPLY | BLOCK => + val end = readEnd() + try readParentType() + finally goto(end) + case SELECTin => + val end = readEnd() + readName() + readTerm() match + case nu: New => + try nu.tpe + finally goto(end) + case SHAREDterm => + forkAt(readAddr()).readParentType() + + /** Read template parents + * @param withArgs if false, only read enough of parent trees to determine their type + * but skip constructor arguments. Return any trees that were partially + * parsed in this way as InferredTypeTrees. 
+ */ + def readParents(withArgs: Boolean)(using Context): List[Tree] = + collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { + nextUnsharedTag match + case APPLY | TYPEAPPLY | BLOCK => + if withArgs then readTerm() + else InferredTypeTree().withType(readParentType()) + case _ => readTpt() + } + private def readTemplate(using Context): Template = { val start = currentAddr assert(sourcePathAt(start).isEmpty) @@ -970,12 +1027,8 @@ class TreeUnpickler(reader: TastyReader, while (bodyIndexer.reader.nextByte != DEFDEF) bodyIndexer.skipTree() bodyIndexer.indexStats(end) } - val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { - nextUnsharedTag match { - case APPLY | TYPEAPPLY | BLOCK => readTerm()(using parentCtx) - case _ => readTpt()(using parentCtx) - } - } + val parentReader = fork + val parents = readParents(withArgs = false)(using parentCtx) val parentTypes = parents.map(_.tpe.dealias) val self = if (nextByte == SELFDEF) { @@ -989,7 +1042,13 @@ class TreeUnpickler(reader: TastyReader, selfInfo = if (self.isEmpty) NoType else self.tpt.tpe) .integrateOpaqueMembers val constr = readIndexedDef().asInstanceOf[DefDef] - val mappedParents = parents.map(_.changeOwner(localDummy, constr.symbol)) + val mappedParents: LazyTreeList = + if parents.exists(_.isInstanceOf[InferredTypeTree]) then + // parents were not read fully, will need to be read again later on demand + new LazyReader(parentReader, localDummy, ctx.mode, ctx.source, + _.readParents(withArgs = true) + .map(_.changeOwner(localDummy, constr.symbol))) + else parents val lazyStats = readLater(end, rdr => { val stats = rdr.readIndexedStats(localDummy, end) @@ -998,7 +1057,7 @@ class TreeUnpickler(reader: TastyReader, defn.patchStdLibClass(cls) NamerOps.addConstructorProxies(cls) setSpan(start, - untpd.Template(constr, mappedParents, Nil, self, lazyStats) + untpd.Template(constr, mappedParents, self, lazyStats) .withType(localDummy.termRef)) } @@ -1170,7 +1229,8 @@ class TreeUnpickler(reader: 
TastyReader, case SINGLETONtpt => SingletonTypeTree(readTerm()) case BYNAMEtpt => - ByNameTypeTree(readTpt()) + val arg = readTpt() + ByNameTypeTree(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns) case NAMEDARG => NamedArg(readName(), readTerm()) case _ => @@ -1226,6 +1286,12 @@ class TreeUnpickler(reader: TastyReader, else tpd.Apply(fn, args) case TYPEAPPLY => tpd.TypeApply(readTerm(), until(end)(readTpt())) + case APPLYsigpoly => + val fn = readTerm() + val methType = readType() + val args = until(end)(readTerm()) + val fun2 = typer.Applications.retypeSignaturePolymorphicFn(fn, methType) + tpd.Apply(fun2, args) case TYPED => val expr = readTerm() val tpt = readTpt() @@ -1570,7 +1636,7 @@ class TreeUnpickler(reader: TastyReader, pickling.println(i"no owner for $addr among $cs%, %") throw ex } - try search(children, NoSymbol) + try search(children, rootOwner) catch { case ex: TreeWithoutOwner => pickling.println(s"ownerTree = $ownerTree") diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala index f2d25d0f34b5..cc2d7dd7ee56 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala @@ -39,9 +39,9 @@ object Scala2Erasure: case RefinedType(parent, _, _) => checkSupported(parent) case AnnotatedType(parent, _) if parent.dealias.isInstanceOf[Scala2RefinedType] => - throw new TypeError(i"Unsupported Scala 2 type: Component $parent of intersection is annotated.") + throw TypeError(em"Unsupported Scala 2 type: Component $parent of intersection is annotated.") case tp @ TypeRef(prefix, _) if !tp.symbol.exists && prefix.dealias.isInstanceOf[Scala2RefinedType] => - throw new TypeError(i"Unsupported Scala 2 type: Prefix $prefix of intersection component is an intersection or refinement.") + throw TypeError(em"Unsupported Scala 2 type: Prefix $prefix of 
intersection component is an intersection or refinement.") case _ => /** A type that would be represented as a RefinedType in Scala 2. diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 333cd9fa9ec3..50b0b875c1fc 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -20,6 +20,7 @@ import printing.Texts._ import printing.Printer import io.AbstractFile import util.common._ +import util.NoSourcePosition import typer.Checking.checkNonCyclic import typer.Nullables._ import transform.SymUtils._ @@ -32,7 +33,7 @@ import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.annotation.switch import reporting._ -import cc.adaptFunctionTypeUnderCC +import cc.{adaptFunctionTypeUnderPureFuns, adaptByNameArgUnderPureFuns} object Scala2Unpickler { @@ -88,7 +89,11 @@ object Scala2Unpickler { val sourceModule = denot.sourceModule.orElse { // For non-toplevel modules, `sourceModule` won't be set when completing // the module class, we need to go find it ourselves. 
- NamerOps.findModuleBuddy(cls.name.sourceModuleName, denot.owner.info.decls) + val modName = cls.name.sourceModuleName + val alternate = + if cls.privateWithin.exists && cls.owner.is(Trait) then modName.expandedName(cls.owner) + else EmptyTermName + NamerOps.findModuleBuddy(modName, denot.owner.info.decls, alternate) } denot.owner.thisType.select(sourceModule) else selfInfo @@ -744,7 +749,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val anyTypes = boundSyms map (_ => defn.AnyType) val boundBounds = boundSyms map (_.info.bounds.hi) val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes) - report.warning(FailureToEliminateExistential(tp, tp1, tp2, boundSyms, classRoot.symbol)) + report.warning(FailureToEliminateExistential(tp, tp1, tp2, boundSyms, classRoot.symbol), NoSourcePosition) tp2 } else tp1 @@ -816,7 +821,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } val tycon = select(pre, sym) val args = until(end, () => readTypeRef()) - if (sym == defn.ByNameParamClass2x) ExprType(args.head) + if (sym == defn.ByNameParamClass2x) ExprType(args.head.adaptByNameArgUnderPureFuns) else if (ctx.settings.scalajs.value && args.length == 2 && sym.owner == JSDefinitions.jsdefn.ScalaJSJSPackageClass && sym == JSDefinitions.jsdefn.PseudoUnionClass) { // Treat Scala.js pseudo-unions as real unions, this requires a @@ -825,7 +830,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } else if args.nonEmpty then tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args.map(translateTempPoly))) - .adaptFunctionTypeUnderCC + .adaptFunctionTypeUnderPureFuns else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams) else tycon case TYPEBOUNDStpe => diff --git a/compiler/src/dotty/tools/dotc/coverage/Coverage.scala b/compiler/src/dotty/tools/dotc/coverage/Coverage.scala index 8ae249c1f5a3..e41bfcd5d09a 100644 --- 
a/compiler/src/dotty/tools/dotc/coverage/Coverage.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Coverage.scala @@ -13,7 +13,6 @@ class Coverage: /** A statement that can be invoked, and thus counted as "covered" by code coverage tools. */ case class Statement( - source: String, location: Location, id: Int, start: Int, diff --git a/compiler/src/dotty/tools/dotc/coverage/Location.scala b/compiler/src/dotty/tools/dotc/coverage/Location.scala index faf1e97d0c01..c565c2bb1116 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Location.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Location.scala @@ -5,6 +5,7 @@ import ast.tpd._ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags.* import java.nio.file.Path +import dotty.tools.dotc.util.SourceFile /** Information about the location of a coverable piece of code. * @@ -12,7 +13,7 @@ import java.nio.file.Path * @param className name of the closest enclosing class * @param fullClassName fully qualified name of the closest enclosing class * @param classType "type" of the closest enclosing class: Class, Trait or Object - * @param method name of the closest enclosing method + * @param method name of the closest enclosing method * @param sourcePath absolute path of the source file */ final case class Location( @@ -20,17 +21,19 @@ final case class Location( className: String, fullClassName: String, classType: String, - method: String, + methodName: String, sourcePath: Path ) object Location: /** Extracts the location info of a Tree. 
*/ - def apply(tree: Tree)(using ctx: Context): Location = + def apply(tree: Tree, source: SourceFile)(using ctx: Context): Location = - val enclosingClass = ctx.owner.denot.enclosingClass - val packageName = ctx.owner.denot.enclosingPackageClass.name.toSimpleName.toString + val ownerDenot = ctx.owner.denot + val enclosingClass = ownerDenot.enclosingClass + val packageName = ownerDenot.enclosingPackageClass.fullName.toSimpleName.toString val className = enclosingClass.name.toSimpleName.toString + val methodName = ownerDenot.enclosingMethod.name.toSimpleName.toString val classType: String = if enclosingClass.is(Trait) then "Trait" @@ -42,6 +45,6 @@ object Location: className, s"$packageName.$className", classType, - ctx.owner.denot.enclosingMethod.name.toSimpleName.toString(), - ctx.source.file.absolute.jpath + methodName, + source.file.absolute.jpath ) diff --git a/compiler/src/dotty/tools/dotc/coverage/Serializer.scala b/compiler/src/dotty/tools/dotc/coverage/Serializer.scala index 23ab73f6d42e..26efa8934e00 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Serializer.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Serializer.scala @@ -4,6 +4,7 @@ package coverage import java.nio.file.{Path, Paths, Files} import java.io.Writer import scala.language.unsafeNulls +import scala.collection.mutable.StringBuilder /** * Serializes scoverage data. 
@@ -62,21 +63,21 @@ object Serializer: def writeStatement(stmt: Statement, writer: Writer): Unit = // Note: we write 0 for the count because we have not measured the actual coverage at this point writer.write(s"""${stmt.id} - |${getRelativePath(stmt.location.sourcePath)} - |${stmt.location.packageName} - |${stmt.location.className} + |${getRelativePath(stmt.location.sourcePath).escaped} + |${stmt.location.packageName.escaped} + |${stmt.location.className.escaped} |${stmt.location.classType} - |${stmt.location.fullClassName} - |${stmt.location.method} + |${stmt.location.fullClassName.escaped} + |${stmt.location.methodName.escaped} |${stmt.start} |${stmt.end} |${stmt.line} - |${stmt.symbolName} + |${stmt.symbolName.escaped} |${stmt.treeName} |${stmt.branch} |0 |${stmt.ignored} - |${stmt.desc} + |${stmt.desc.escaped} |\f |""".stripMargin) @@ -84,3 +85,27 @@ object Serializer: coverage.statements.toSeq .sortBy(_.id) .foreach(stmt => writeStatement(stmt, writer)) + + /** Makes a String suitable for output in the coverage statement data as a single line. 
+ * Escaped characters: '\\' (backslash), '\n', '\r', '\f' + */ + extension (str: String) def escaped: String = + val builder = StringBuilder(str.length) + var i = 0 + while + i < str.length + do + str.charAt(i) match + case '\\' => + builder ++= "\\\\" + case '\n' => + builder ++= "\\n" + case '\r' => + builder ++= "\\r" + case '\f' => + builder ++= "\\f" + case c => + builder += c + i += 1 + end while + builder.result() diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala index 864f5277bff3..86ae99b3e0f9 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala @@ -29,7 +29,7 @@ class ReadTasty extends Phase { val className = unit.className.toTypeName def cannotUnpickle(reason: String): None.type = { - report.error(s"class $className cannot be unpickled because $reason") + report.error(em"class $className cannot be unpickled because $reason") None } diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index 04c65a3d3882..fb0abe3332ed 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -6,6 +6,7 @@ import scala.language.unsafeNulls import io.{JarArchive, AbstractFile, Path} import core.Contexts._ +import core.Decorators.em import java.io.File class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { @@ -27,7 +28,7 @@ class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { .toList case "tasty" => TastyFileUtil.getClassName(file) case _ => - report.error(s"File extension is not `tasty` or `jar`: ${file.path}") + report.error(em"File extension is not `tasty` or `jar`: ${file.path}") Nil } classNames.map(new TASTYCompilationUnit(_)) diff --git a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala 
b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala index debf51872d5a..ebb76e9e9bf9 100644 --- a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala +++ b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala @@ -148,47 +148,6 @@ class InlineReducer(inliner: Inliner)(using Context): binding1.withSpan(call.span) } - /** Rewrite an application - * - * ((x1, ..., xn) => b)(e1, ..., en) - * - * to - * - * val/def x1 = e1; ...; val/def xn = en; b - * - * where `def` is used for call-by-name parameters. However, we shortcut any NoPrefix - * refs among the ei's directly without creating an intermediate binding. - */ - def betaReduce(tree: Tree)(using Context): Tree = tree match { - case Apply(Select(cl @ closureDef(ddef), nme.apply), args) if defn.isFunctionType(cl.tpe) => - // closureDef also returns a result for closures wrapped in Inlined nodes. - // These need to be preserved. - def recur(cl: Tree): Tree = cl match - case Inlined(call, bindings, expr) => - cpy.Inlined(cl)(call, bindings, recur(expr)) - case _ => ddef.tpe.widen match - case mt: MethodType if ddef.paramss.head.length == args.length => - val bindingsBuf = new DefBuffer - val argSyms = mt.paramNames.lazyZip(mt.paramInfos).lazyZip(args).map { (name, paramtp, arg) => - arg.tpe.dealias match { - case ref @ TermRef(NoPrefix, _) => ref.symbol - case _ => - paramBindingDef(name, paramtp, arg, bindingsBuf)( - using ctx.withSource(cl.source) - ).symbol - } - } - val expander = new TreeTypeMap( - oldOwners = ddef.symbol :: Nil, - newOwners = ctx.owner :: Nil, - substFrom = ddef.paramss.head.map(_.symbol), - substTo = argSyms) - Block(bindingsBuf.toList, expander.transform(ddef.rhs)).withSpan(tree.span) - case _ => tree - recur(cl) - case _ => tree - } - /** The result type of reducing a match. It consists optionally of a list of bindings * for the pattern-bound variables and the RHS of the selected case. * Returns `None` if no case was selected. 
@@ -269,12 +228,21 @@ class InlineReducer(inliner: Inliner)(using Context): } } - // Extractors contain Bind nodes in type parameter lists, the tree looks like this: + // Extractors can contain Bind nodes in type parameter lists, + // for that case tree looks like this: // UnApply[t @ t](pats)(implicits): T[t] // Test case is pos/inline-caseclass.scala. + // Alternatively, for explicitly specified type binds in type annotations like in + // case A(B): A[t] + // the tree will look like this: + // Unapply[t](pats)(implicits) : T[t @ t] + // and the binds will be found in the type tree instead + // Test case is pos-macros/i15971 + val tptBinds = getBinds(Set.empty[TypeSymbol], tpt) val binds: Set[TypeSymbol] = pat match { - case UnApply(TypeApply(_, tpts), _, _) => getBinds(Set.empty[TypeSymbol], tpts) - case _ => getBinds(Set.empty[TypeSymbol], tpt) + case UnApply(TypeApply(_, tpts), _, _) => + getBinds(Set.empty[TypeSymbol], tpts) ++ tptBinds + case _ => tptBinds } val extractBindVariance = new TypeAccumulator[TypeBindsMap] { @@ -303,11 +271,11 @@ class InlineReducer(inliner: Inliner)(using Context): def addTypeBindings(typeBinds: TypeBindsMap)(using Context): Unit = typeBinds.foreachBinding { case (sym, shouldBeMinimized) => newTypeBinding(sym, - ctx.gadt.approximation(sym, fromBelow = shouldBeMinimized, maxLevel = Int.MaxValue)) + ctx.gadtState.approximation(sym, fromBelow = shouldBeMinimized, maxLevel = Int.MaxValue)) } def registerAsGadtSyms(typeBinds: TypeBindsMap)(using Context): Unit = - if (typeBinds.size > 0) ctx.gadt.addToConstraint(typeBinds.keys) + if (typeBinds.size > 0) ctx.gadtState.addToConstraint(typeBinds.keys) pat match { case Typed(pat1, tpt) => diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index bea42e82ce6f..872dc7793ff4 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -21,7 +21,9 @@ import 
collection.mutable import reporting.trace import util.Spans.Span import dotty.tools.dotc.transform.Splicer +import dotty.tools.dotc.transform.BetaReduce import quoted.QuoteUtils +import staging.StagingLevel import scala.annotation.constructorOnly /** General support for inlining */ @@ -227,7 +229,7 @@ class Inliner(val call: tpd.Tree)(using Context): val binding = { var newArg = arg.changeOwner(ctx.owner, boundSym) if bindingFlags.is(Inline) && argIsBottom then - newArg = Typed(newArg, TypeTree(formal)) // type ascribe RHS to avoid type errors in expansion. See i8612.scala + newArg = Typed(newArg, TypeTree(formal.widenExpr)) // type ascribe RHS to avoid type errors in expansion. See i8612.scala if isByName then DefDef(boundSym, newArg) else ValDef(boundSym, newArg) }.withSpan(boundSym.span) @@ -253,7 +255,7 @@ class Inliner(val call: tpd.Tree)(using Context): computeParamBindings(tp.resultType, targs.drop(tp.paramNames.length), argss, formalss, buf) case tp: MethodType => if argss.isEmpty then - report.error(i"missing arguments for inline method $inlinedMethod", call.srcPos) + report.error(em"missing arguments for inline method $inlinedMethod", call.srcPos) false else tp.paramNames.lazyZip(formalss.head).lazyZip(argss.head).foreach { (name, formal, arg) => @@ -616,8 +618,8 @@ class Inliner(val call: tpd.Tree)(using Context): def issueError() = callValueArgss match { case (msgArg :: Nil) :: Nil => val message = msgArg.tpe match { - case ConstantType(Constant(msg: String)) => msg - case _ => s"A literal string is expected as an argument to `compiletime.error`. Got ${msgArg.show}" + case ConstantType(Constant(msg: String)) => msg.toMessage + case _ => em"A literal string is expected as an argument to `compiletime.error`. Got $msgArg" } // Usually `error` is called from within a rewrite method. 
In this // case we need to report the error at the point of the outermost enclosing inline @@ -749,9 +751,9 @@ class Inliner(val call: tpd.Tree)(using Context): ctx override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree = - val tree1 = inlineIfNeeded( - tryInlineArg(tree.asInstanceOf[tpd.Tree]) `orElse` super.typedIdent(tree, pt) - ) + val locked = ctx.typerState.ownedVars + val tree0 = tryInlineArg(tree.asInstanceOf[tpd.Tree]) `orElse` super.typedIdent(tree, pt) + val tree1 = inlineIfNeeded(tree0, pt, locked) tree1 match case id: Ident if tpd.needsSelect(id.tpe) => inlining.println(i"expanding $id to selection") @@ -760,6 +762,7 @@ class Inliner(val call: tpd.Tree)(using Context): tree1 override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { + val locked = ctx.typerState.ownedVars val qual1 = typed(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) val resNoReduce = untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt) val reducedProjection = reducer.reduceProjection(resNoReduce) @@ -771,7 +774,7 @@ class Inliner(val call: tpd.Tree)(using Context): if resNoReduce ne res then typed(res, pt) // redo typecheck if reduction changed something else if res.symbol.isInlineMethod then - inlineIfNeeded(res) + inlineIfNeeded(res, pt, locked) else ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.srcPos) res @@ -809,19 +812,22 @@ class Inliner(val call: tpd.Tree)(using Context): tree match case Quoted(Spliced(inner)) => inner case _ => tree - val res = cancelQuotes(constToLiteral(betaReduce(super.typedApply(tree, pt)))) match { + val locked = ctx.typerState.ownedVars + val res = cancelQuotes(constToLiteral(BetaReduce(super.typedApply(tree, pt)))) match { case res: Apply if res.symbol == defn.QuotedRuntime_exprSplice - && StagingContext.level == 0 + && StagingLevel.level == 0 && !hasInliningErrors => val expanded = expandMacro(res.args.head, tree.srcPos) + 
transform.TreeChecker.checkMacroGeneratedTree(res, expanded) typedExpr(expanded) // Inline calls and constant fold code generated by the macro case res => - specializeEq(inlineIfNeeded(res)) + specializeEq(inlineIfNeeded(res, pt, locked)) } res override def typedTypeApply(tree: untpd.TypeApply, pt: Type)(using Context): Tree = - val tree1 = inlineIfNeeded(constToLiteral(betaReduce(super.typedTypeApply(tree, pt)))) + val locked = ctx.typerState.ownedVars + val tree1 = inlineIfNeeded(constToLiteral(BetaReduce(super.typedTypeApply(tree, pt))), pt, locked) if tree1.symbol.isQuote then ctx.compilationUnit.needsStaging = true tree1 @@ -889,11 +895,11 @@ class Inliner(val call: tpd.Tree)(using Context): /** True if this inline typer has already issued errors */ override def hasInliningErrors(using Context) = ctx.reporter.errorCount > initialErrorCount - private def inlineIfNeeded(tree: Tree)(using Context): Tree = + private def inlineIfNeeded(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = val meth = tree.symbol if meth.isAllOf(DeferredInline) then - errorTree(tree, i"Deferred inline ${meth.showLocated} cannot be invoked") - else if Inlines.needsInlining(tree) then Inlines.inlineCall(tree) + errorTree(tree, em"Deferred inline ${meth.showLocated} cannot be invoked") + else if Inlines.needsInlining(tree) then Inlines.inlineCall(simplify(tree, pt, locked)) else tree override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = @@ -1002,7 +1008,7 @@ class Inliner(val call: tpd.Tree)(using Context): super.transform(t1) case t: Apply => val t1 = super.transform(t) - if (t1 `eq` t) t else reducer.betaReduce(t1) + if (t1 `eq` t) t else BetaReduce(t1) case Block(Nil, expr) => super.transform(expr) case _ => @@ -1021,7 +1027,7 @@ class Inliner(val call: tpd.Tree)(using Context): } private def expandMacro(body: Tree, splicePos: SrcPos)(using Context) = { - assert(StagingContext.level == 0) + assert(StagingLevel.level == 0) val 
inlinedFrom = enclosingInlineds.last val dependencies = macroDependencies(body) val suspendable = ctx.compilationUnit.isSuspendable diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index d1a88406fe45..36dc8a642afc 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -14,6 +14,7 @@ import ErrorReporting.errorTree import dotty.tools.dotc.util.{SourceFile, SourcePosition, SrcPos} import parsing.Parsers.Parser import transform.{PostTyper, Inlining, CrossVersionChecks} +import staging.StagingLevel import collection.mutable import reporting.trace @@ -56,7 +57,7 @@ object Inlines: case _ => isInlineable(tree.symbol) && !tree.tpe.widenTermRefExpr.isInstanceOf[MethodOrPoly] - && StagingContext.level == 0 + && StagingLevel.level == 0 && ( ctx.phase == Phases.inliningPhase || (ctx.phase == Phases.typerPhase && needsTransparentInlining(tree)) @@ -85,7 +86,10 @@ object Inlines: if (tree.symbol == defn.CompiletimeTesting_typeChecks) return Intrinsics.typeChecks(tree) if (tree.symbol == defn.CompiletimeTesting_typeCheckErrors) return Intrinsics.typeCheckErrors(tree) - CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) + if ctx.isAfterTyper then + // During typer we wait with cross version checks until PostTyper, in order + // not to provoke cyclic references. See i16116 for a test case. + CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) if tree.symbol.isConstructor then return tree // error already reported for the inline constructor definition @@ -153,9 +157,9 @@ object Inlines: else ("successive inlines", ctx.settings.XmaxInlines) errorTree( tree, - i"""|Maximal number of $reason (${setting.value}) exceeded, - |Maybe this is caused by a recursive inline method? 
- |You can use ${setting.name} to change the limit.""", + em"""|Maximal number of $reason (${setting.value}) exceeded, + |Maybe this is caused by a recursive inline method? + |You can use ${setting.name} to change the limit.""", (tree :: enclosingInlineds).last.srcPos ) if ctx.base.stopInlining && enclosingInlineds.isEmpty then @@ -178,37 +182,28 @@ object Inlines: // as its right hand side. The call to the wrapper unapply serves as the signpost for pattern matching. // After pattern matching, the anonymous class is removed in phase InlinePatterns with a beta reduction step. // - // An inline unapply `P.unapply` in a plattern `P(x1,x2,...)` is transformed into - // `{ class $anon { def unapply(t0: T0)(using t1: T1, t2: T2, ...): R = P.unapply(t0)(using t1, t2, ...) }; new $anon }.unapply` - // and the call `P.unapply(x1, x2, ...)` is inlined. + // An inline unapply `P.unapply` in a pattern `P[...](using ...)(x1,x2,...)(using t1: T1, t2: T2, ...)` is transformed into + // `{ class $anon { def unapply(s: S)(using t1: T1, t2: T2, ...): R = P.unapply[...](using ...)(s)(using t1, t2, ...) }; new $anon }.unapply(using y1,y2,...)` + // and the call `P.unapply[...](using ...)(x1, x2, ...)(using t1, t2, ...)` is inlined. // This serves as a placeholder for the inlined body until the `patternMatcher` phase. After pattern matcher // transforms the patterns into terms, the `inlinePatterns` phase removes this anonymous class by β-reducing // the call to the `unapply`. 
- object SplitFunAndGivenArgs: - def unapply(tree: Tree): (Tree, List[List[Tree]]) = tree match - case Apply(SplitFunAndGivenArgs(fn, argss), args) => (fn, argss :+ args) - case _ => (tree, Nil) - val UnApply(SplitFunAndGivenArgs(fun, leadingImplicits), trailingImplicits, patterns) = unapp - if leadingImplicits.flatten.nonEmpty then - // To support them see https://github.com/lampepfl/dotty/pull/13158 - report.error("inline unapply methods with given parameters before the scrutinee are not supported", fun) + val UnApply(fun, trailingImplicits, patterns) = unapp val sym = unapp.symbol var unapplySym1: Symbol = NoSymbol // created from within AnonClass() and used afterwards val newUnapply = AnonClass(ctx.owner, List(defn.ObjectType), sym.coord) { cls => - val targs = fun match - case TypeApply(_, targs) => targs - case _ => Nil - val unapplyInfo = sym.info match - case info: PolyType => info.instantiate(targs.map(_.tpe)) - case info => info - - val unapplySym = newSymbol(cls, sym.name.toTermName, Synthetic | Method, unapplyInfo, coord = sym.coord).entered + // `fun` is a partially applied method that contains all type applications of the method. + // The methodic type `fun.tpe.widen` is the type of the function starting from the scrutinee argument + // and its type parameters are instantiated. 
+ val unapplySym = newSymbol(cls, sym.name.toTermName, Synthetic | Method, fun.tpe.widen, coord = sym.coord).entered val unapply = DefDef(unapplySym.asTerm, argss => - inlineCall(fun.appliedToArgss(argss).withSpan(unapp.span))(using ctx.withOwner(unapplySym)) + val body = fun.appliedToArgss(argss).withSpan(unapp.span) + if body.symbol.is(Transparent) then inlineCall(body)(using ctx.withOwner(unapplySym)) + else body ) unapplySym1 = unapplySym List(unapply) @@ -235,8 +230,8 @@ object Inlines: val retainer = meth.copy( name = BodyRetainerName(meth.name), - flags = meth.flags &~ (Inline | Macro | Override) | Private, - coord = mdef.rhs.span.startPos).asTerm + flags = (meth.flags &~ (Inline | Macro | Override | AbsOverride)) | Private, + coord = mdef.rhs.span.startPos).asTerm.entered retainer.deriveTargetNameAnnotation(meth, name => BodyRetainerName(name.asTermName)) DefDef(retainer, prefss => inlineCall( @@ -386,8 +381,7 @@ object Inlines: /** Expand call to scala.compiletime.codeOf */ def codeOf(arg: Tree, pos: SrcPos)(using Context): Tree = - val ctx1 = ctx.fresh.setSetting(ctx.settings.color, "never") - Literal(Constant(arg.show(using ctx1))).withSpan(pos.span) + Literal(Constant(arg.show(using ctx.withoutColors))).withSpan(pos.span) end Intrinsics /** Produces an inlined version of `call` via its `inlined` method. 
@@ -439,8 +433,7 @@ object Inlines: val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span) evidence.tpe match case fail: Implicits.SearchFailureType => - val msg = evTyper.missingArgMsg(evidence, tpt.tpe, "") - errorTree(call, em"$msg") + errorTree(call, evTyper.missingArgMsg(evidence, tpt.tpe, "")) case _ => evidence } diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala index 7e47bbfdfa8a..7a0d3f61cb33 100644 --- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala @@ -17,11 +17,12 @@ import NameKinds.{InlineAccessorName, UniqueInlineName} import inlines.Inlines import NameOps._ import Annotations._ -import transform.{AccessProxies, PCPCheckAndHeal, Splicer} +import transform.{AccessProxies, Splicer} +import staging.CrossStageSafety import transform.SymUtils.* import config.Printers.inlining import util.Property -import dotty.tools.dotc.transform.TreeMapWithStages._ +import staging.StagingLevel object PrepareInlineable { import tpd._ @@ -73,7 +74,7 @@ object PrepareInlineable { !sym.isContainedIn(inlineSym) && !(sym.isStableMember && sym.info.widenTermRefExpr.isInstanceOf[ConstantType]) && !sym.isInlineMethod && - (Inlines.inInlineMethod || StagingContext.level > 0) + (Inlines.inInlineMethod || StagingLevel.level > 0) def preTransform(tree: Tree)(using Context): Tree @@ -90,8 +91,8 @@ object PrepareInlineable { } private def stagingContext(tree: Tree)(using Context): Context = tree match - case tree: Apply if tree.symbol.isQuote => StagingContext.quoteContext - case tree: Apply if tree.symbol.isExprSplice => StagingContext.spliceContext + case tree: Apply if tree.symbol.isQuote => StagingLevel.quoteContext + case tree: Apply if tree.symbol.isExprSplice => StagingLevel.spliceContext case _ => ctx } @@ -284,7 +285,7 @@ object PrepareInlineable { private def checkInlineMethod(inlined: Symbol, body: 
Tree)(using Context): body.type = { if Inlines.inInlineMethod(using ctx.outer) then - report.error(ex"Implementation restriction: nested inline methods are not supported", inlined.srcPos) + report.error(em"Implementation restriction: nested inline methods are not supported", inlined.srcPos) if (inlined.is(Macro) && !ctx.isAfterTyper) { @@ -293,7 +294,7 @@ object PrepareInlineable { if (code.symbol.flags.is(Inline)) report.error("Macro cannot be implemented with an `inline` method", code.srcPos) Splicer.checkValidMacroBody(code) - new PCPCheckAndHeal(freshStagingContext).transform(body) // Ignore output, only check PCP + (new CrossStageSafety).transform(body) // Ignore output, only check cross-stage safety case Block(List(stat), Literal(Constants.Constant(()))) => checkMacro(stat) case Block(Nil, expr) => checkMacro(expr) case Typed(expr, _) => checkMacro(expr) diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 414d406e870a..e4d0cce9f6f9 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -11,11 +11,13 @@ import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.Names.{Name, TermName} import dotty.tools.dotc.core.NameKinds.SimpleNameKind import dotty.tools.dotc.core.NameOps._ +import dotty.tools.dotc.core.Phases import dotty.tools.dotc.core.Scopes._ import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol, defn, newSymbol} import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.SymDenotations.SymDenotation import dotty.tools.dotc.core.TypeError +import dotty.tools.dotc.core.Phases import dotty.tools.dotc.core.Types.{AppliedType, ExprType, MethodOrPoly, NameFilter, NoType, RefinedType, TermRef, Type, TypeProxy} import dotty.tools.dotc.parsing.Tokens import dotty.tools.dotc.util.Chars @@ -45,7 +47,7 @@ object Completion { */ def completions(pos: SourcePosition)(using 
Context): (Int, List[Completion]) = { val path = Interactive.pathTo(ctx.compilationUnit.tpdTree, pos.span) - computeCompletions(pos, path)(using Interactive.contextOfPath(path)) + computeCompletions(pos, path)(using Interactive.contextOfPath(path).withPhase(Phases.typerPhase)) } /** diff --git a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala index 6b2237a09b3f..fd6d426f39bb 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala @@ -313,8 +313,8 @@ object Interactive { case _ => } localCtx - case tree @ Template(constr, parents, self, _) => - if ((constr :: self :: parents).contains(nested)) outer + case tree @ Template(constr, _, self, _) => + if ((constr :: self :: tree.parentsOrDerived).contains(nested)) outer else contextOfStat(tree.body, nested, tree.symbol, outer.inClassContext(self.symbol)) case _ => outer diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 4611554a01a3..daeebcbcc17c 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -71,10 +71,10 @@ object JavaParsers { } } - def syntaxError(msg: String, skipIt: Boolean): Unit = + def syntaxError(msg: Message, skipIt: Boolean): Unit = syntaxError(in.offset, msg, skipIt) - def syntaxError(offset: Int, msg: String, skipIt: Boolean): Unit = { + def syntaxError(offset: Int, msg: Message, skipIt: Boolean): Unit = { if (offset > lastErrorOffset) { syntaxError(msg, offset) // no more errors on this token. @@ -178,9 +178,7 @@ object JavaParsers { if (in.token != token) { val offsetToReport = in.offset val msg = - tokenString(token) + " expected but " + - tokenString(in.token) + " found." - + em"${tokenString(token)} expected but ${tokenString(in.token)} found." 
syntaxError(offsetToReport, msg, skipIt = true) } if (in.token == token) in.nextToken() @@ -271,7 +269,7 @@ object JavaParsers { case FLOAT => in.nextToken(); TypeTree(FloatType) case DOUBLE => in.nextToken(); TypeTree(DoubleType) case BOOLEAN => in.nextToken(); TypeTree(BooleanType) - case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree + case _ => syntaxError(em"illegal start of type", skipIt = true); errorTypeTree } } @@ -762,7 +760,7 @@ object JavaParsers { accept(SEMI) val names = buf.toList if (names.length < 2) { - syntaxError(start, "illegal import", skipIt = false) + syntaxError(start, em"illegal import", skipIt = false) List() } else { @@ -822,7 +820,7 @@ object JavaParsers { val iface = atSpan(start, nameOffset) { TypeDef( name, - makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.Trait | Flags.JavaInterface | Flags.Abstract) + makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.JavaInterface) } addCompanionObject(statics, iface) } @@ -858,10 +856,9 @@ object JavaParsers { } (statics.toList, members.toList) } - def annotationParents: List[Select] = List( - scalaAnnotationDot(tpnme.Annotation), - Select(javaLangDot(nme.annotation), tpnme.Annotation), - scalaAnnotationDot(tpnme.ClassfileAnnotation) + def annotationParents: List[Tree] = List( + javaLangObject(), + Select(javaLangDot(nme.annotation), tpnme.Annotation) ) def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = { accept(AT) @@ -877,7 +874,7 @@ object JavaParsers { List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined)) val templ = makeTemplate(annotationParents, constr :: body, List(), true) val annot = atSpan(start, nameOffset) { - TypeDef(name, templ).withMods(mods | Flags.Abstract) + TypeDef(name, templ).withMods(mods | Flags.JavaInterface | Flags.JavaAnnotation) } addCompanionObject(statics, annot) } @@ -955,7 +952,7 @@ object JavaParsers { case INTERFACE => interfaceDecl(start, mods) case AT => 
annotationDecl(start, mods) case CLASS => classDecl(start, mods) - case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) + case _ => in.nextToken(); syntaxError(em"illegal start of type declaration", skipIt = true); List(errorTypeTree) } def tryConstant: Option[Constant] = { diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala index 1be8bdae6bd1..d21d4b85b5df 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala @@ -10,6 +10,7 @@ import JavaTokens._ import scala.annotation.{switch, tailrec} import util.Chars._ import PartialFunction.cond +import core.Decorators.em object JavaScanners { @@ -108,7 +109,7 @@ object JavaScanners { setStrVal() nextChar() else - error("unclosed string literal") + error(em"unclosed string literal") else nextChar() if ch != '\"' then // "" empty string literal @@ -127,7 +128,7 @@ object JavaScanners { setStrVal() } else - error("unclosed character literal") + error(em"unclosed character literal") case '=' => token = EQUALS @@ -298,7 +299,7 @@ object JavaScanners { nextChar() token = DOTDOTDOT } - else error("`.` character expected") + else error(em"`.` character expected") } case ';' => @@ -336,7 +337,7 @@ object JavaScanners { case SU => if (isAtEnd) token = EOF else { - error("illegal character") + error(em"illegal character") nextChar() } @@ -347,7 +348,7 @@ object JavaScanners { getIdentRest() } else { - error("illegal character: " + ch.toInt) + error(em"illegal character: ${ch.toInt}") nextChar() } } @@ -360,7 +361,7 @@ object JavaScanners { case _ => nextChar(); skipLineComment() } @tailrec def skipJavaComment(): Unit = ch match { - case SU => incompleteInputError("unclosed comment") + case SU => incompleteInputError(em"unclosed comment") case '*' => nextChar(); if (ch == '/') nextChar() else skipJavaComment() case _ => 
nextChar(); skipJavaComment() } @@ -480,7 +481,7 @@ object JavaScanners { nextChar() } if (ch != LF && ch != CR) { // CR-LF is already normalized into LF by `JavaCharArrayReader` - error("illegal text block open delimiter sequence, missing line terminator") + error(em"illegal text block open delimiter sequence, missing line terminator") return } nextChar() @@ -529,7 +530,7 @@ object JavaScanners { // Bail out if the block never did have an end if (!blockClosed) { - error("unclosed text block") + error(em"unclosed text block") return } @@ -642,14 +643,14 @@ object JavaScanners { while (i < len) { val d = digit2int(strVal.charAt(i), base) if (d < 0) { - error("malformed integer number") + error(em"malformed integer number") return 0 } if (value < 0 || limit / (base / divider) < value || limit - (d / divider) < value * (base / divider) && !(negated && limit == value * base - 1 + d)) { - error("integer number too large") + error(em"integer number too large") return 0 } value = value * base + d @@ -666,11 +667,11 @@ object JavaScanners { try { val value: Double = java.lang.Double.valueOf(strVal.toString).nn.doubleValue() if (value > limit) - error("floating point number too large") + error(em"floating point number too large") if (negated) -value else value } catch { case _: NumberFormatException => - error("malformed floating point number") + error(em"malformed floating point number") 0.0 } } diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 309dd8a20aba..15a639743c15 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -15,7 +15,7 @@ import core._ import Flags._ import Contexts._ import Names._ -import NameKinds.WildcardParamName +import NameKinds.{WildcardParamName, QualifiedName} import NameOps._ import ast.{Positioned, Trees} import ast.Trees._ @@ -30,7 +30,7 @@ import scala.annotation.tailrec import 
rewrites.Rewrites.{patch, overlapsPatch} import reporting._ import config.Feature -import config.Feature.{sourceVersion, migrateTo3} +import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} import config.SourceVersion._ import config.SourceVersion @@ -142,7 +142,6 @@ object Parsers { val length = if offset == in.offset && in.name != null then in.name.show.length else 0 syntaxError(msg, Span(offset, offset + length)) lastErrorOffset = in.offset - end if /** Unconditionally issue an error at given span, without * updating lastErrorOffset. @@ -151,7 +150,7 @@ object Parsers { report.error(msg, source.atSpan(span)) def unimplementedExpr(using Context): Select = - Select(Select(rootDot(nme.scala), nme.Predef), nme.???) + Select(scalaDot(nme.Predef), nme.???) } trait OutlineParserCommon extends ParserCommon { @@ -188,9 +187,11 @@ object Parsers { def isIdent = in.isIdent def isIdent(name: Name) = in.isIdent(name) - def isPureArrow(name: Name): Boolean = ctx.settings.Ycc.value && isIdent(name) + def isPureArrow(name: Name): Boolean = isIdent(name) && Feature.pureFunsEnabled def isPureArrow: Boolean = isPureArrow(nme.PUREARROW) || isPureArrow(nme.PURECTXARROW) def isErased = isIdent(nme.erased) && in.erasedEnabled + // Are we seeing an `erased` soft keyword that will not be an identifier? 
+ def isErasedKw = isErased && in.isSoftModifierInParamModifierPosition def isSimpleLiteral = simpleLiteralTokens.contains(in.token) || isIdent(nme.raw.MINUS) && numericLitTokens.contains(in.lookahead.token) @@ -259,9 +260,6 @@ object Parsers { in.skip() lastErrorOffset = in.offset - def warning(msg: Message, sourcePos: SourcePosition): Unit = - report.warning(msg, sourcePos) - def warning(msg: Message, offset: Int = in.offset): Unit = report.warning(msg, source.atSpan(Span(offset))) @@ -338,7 +336,7 @@ object Parsers { in.nextToken() recur(true, endSeen) else if in.token == END then - if endSeen then syntaxError("duplicate end marker") + if endSeen then syntaxError(em"duplicate end marker") checkEndMarker(stats) recur(sepSeen, endSeen = true) else if isStatSeqEnd || in.token == altEnd then @@ -350,7 +348,7 @@ object Parsers { val statFollows = mustStartStatTokens.contains(found) syntaxError( if noPrevStat then IllegalStartOfStatement(what, isModifier, statFollows) - else i"end of $what expected but ${showToken(found)} found") + else em"end of $what expected but ${showToken(found)} found") if mustStartStatTokens.contains(found) then false // it's a statement that might be legal in an outer context else @@ -452,7 +450,7 @@ object Parsers { */ def convertToParam(tree: Tree, mods: Modifiers): ValDef = def fail() = - syntaxError(s"not a legal formal parameter for a function literal", tree.span) + syntaxError(em"not a legal formal parameter for a function literal", tree.span) makeParameter(nme.ERROR, tree, mods) tree match case param: ValDef => @@ -467,6 +465,15 @@ object Parsers { case _ => fail() + /** Checks that tuples don't contain a parameter. 
*/ + def checkNonParamTuple(t: Tree) = t match + case Tuple(ts) => ts.collectFirst { + case param: ValDef => + syntaxError(em"invalid parameter definition syntax in tuple value", param.span) + } + case _ => + + /** Convert (qual)ident to type identifier */ def convertToTypeId(tree: Tree): Tree = tree match { @@ -610,9 +617,9 @@ object Parsers { if in.isNewLine && !(nextIndentWidth < startIndentWidth) then warning( if startIndentWidth <= nextIndentWidth then - i"""Line is indented too far to the right, or a `{` is missing before: - | - |${t.tryToShow}""" + em"""Line is indented too far to the right, or a `{` is missing before: + | + |${t.tryToShow}""" else in.spaceTabMismatchMsg(startIndentWidth, nextIndentWidth), in.next.offset @@ -627,7 +634,7 @@ object Parsers { if in.isNewLine then val nextIndentWidth = in.indentWidth(in.next.offset) if in.currentRegion.indentWidth < nextIndentWidth then - warning(i"Line is indented too far to the right, or a `{` or `:` is missing", in.next.offset) + warning(em"Line is indented too far to the right, or a `{` or `:` is missing", in.next.offset) /* -------- REWRITES ----------------------------------------------------------- */ @@ -770,7 +777,7 @@ object Parsers { } }) canRewrite &= (in.isAfterLineEnd || statCtdTokens.contains(in.token)) // test (5) - if (canRewrite && (!underColonSyntax || in.fewerBracesEnabled)) { + if canRewrite && (!underColonSyntax || Feature.fewerBracesEnabled) then val openingPatchStr = if !colonRequired then "" else if testChar(startOpening - 1, Chars.isOperatorPart(_)) then " :" @@ -778,7 +785,6 @@ object Parsers { val (startClosing, endClosing) = closingElimRegion() patch(source, Span(startOpening, endOpening), openingPatchStr) patch(source, Span(startClosing, endClosing), "") - } t } @@ -949,7 +955,7 @@ object Parsers { lookahead.isArrow && { lookahead.nextToken() - lookahead.token == INDENT + lookahead.token == INDENT || lookahead.token == EOF } lookahead.nextToken() if lookahead.isIdent || 
lookahead.token == USCORE then @@ -960,11 +966,11 @@ object Parsers { isArrowIndent() else false - /** Under -Ycc: is the following token sequuence a capture set `{ref1, ..., refN}` - * followed by a token that can start a type? + /** Under captureChecking language import: is the following token sequence a + * capture set `{ref1, ..., refN}` followed by a token that can start a type? */ def followingIsCaptureSet(): Boolean = - ctx.settings.Ycc.value && { + Feature.ccEnabled && { val lookahead = in.LookaheadScanner() def followingIsTypeStart() = lookahead.nextToken() @@ -1017,7 +1023,7 @@ object Parsers { * body */ def isColonLambda = - in.fewerBracesEnabled && in.token == COLONfollow && followingIsLambdaAfterColon() + Feature.fewerBracesEnabled && in.token == COLONfollow && followingIsLambdaAfterColon() /** operand { infixop operand | MatchClause } [postfixop], * @@ -1074,7 +1080,7 @@ object Parsers { val name = in.name if name == nme.CONSTRUCTOR || name == nme.STATIC_CONSTRUCTOR then report.error( - i"""Illegal backquoted identifier: `` and `` are forbidden""", + em"""Illegal backquoted identifier: `` and `` are forbidden""", in.sourcePos()) in.nextToken() name @@ -1227,7 +1233,7 @@ object Parsers { null } catch { - case ex: FromDigitsException => syntaxErrorOrIncomplete(ex.getMessage) + case ex: FromDigitsException => syntaxErrorOrIncomplete(ex.getMessage.toMessage) } Literal(Constant(value)) } @@ -1345,11 +1351,16 @@ object Parsers { // note: next is defined here because current == NEWLINE if (in.token == NEWLINE && p(in.next.token)) newLineOpt() - def colonAtEOLOpt(): Unit = { + def acceptIndent() = + if in.token != INDENT then + syntaxErrorOrIncomplete(em"indented definitions expected, ${in} found") + + def colonAtEOLOpt(): Unit = possibleColonOffset = in.lastOffset in.observeColonEOL(inTemplate = false) - if in.token == COLONeol then in.nextToken() - } + if in.token == COLONeol then + in.nextToken() + acceptIndent() def argumentStart(): Unit = colonAtEOLOpt() 
@@ -1357,9 +1368,9 @@ object Parsers { in.nextToken() if in.indentWidth(in.offset) == in.currentRegion.indentWidth then report.errorOrMigrationWarning( - i"""This opening brace will start a new statement in Scala 3. - |It needs to be indented to the right to keep being treated as - |an argument to the previous expression.${rewriteNotice()}""", + em"""This opening brace will start a new statement in Scala 3. + |It needs to be indented to the right to keep being treated as + |an argument to the previous expression.${rewriteNotice()}""", in.sourcePos(), from = `3.0`) patch(source, Span(in.offset), " ") @@ -1369,8 +1380,7 @@ object Parsers { if in.lookahead.token == END then in.token = NEWLINE else in.nextToken() - if in.token != INDENT && in.token != LBRACE then - syntaxErrorOrIncomplete(i"indented definitions expected, ${in} found") + if in.token != LBRACE then acceptIndent() else newLineOptWhenFollowedBy(LBRACE) @@ -1411,7 +1421,7 @@ object Parsers { if in.token == END then val start = in.skipToken() if stats.isEmpty || !matchesAndSetEnd(stats.last) then - syntaxError("misaligned end marker", Span(start, in.lastCharOffset)) + syntaxError(em"misaligned end marker", Span(start, in.lastCharOffset)) else if overlapsPatch(source, Span(start, start)) then patch(source, Span(start, start), "") patch(source, Span(start, in.lastCharOffset), s"} // end $endName") @@ -1426,19 +1436,39 @@ object Parsers { */ def toplevelTyp(): Tree = rejectWildcardType(typ()) - private def isFunction(tree: Tree): Boolean = tree match { - case Parens(tree1) => isFunction(tree1) - case Block(Nil, tree1) => isFunction(tree1) - case _: Function => true - case _ => false + private def getFunction(tree: Tree): Option[Function] = tree match { + case Parens(tree1) => getFunction(tree1) + case Block(Nil, tree1) => getFunction(tree1) + case t: Function => Some(t) + case _ => None } + private def checkFunctionNotErased(f: Function, context: String) = + def fail(span: Span) = + 
syntaxError(em"Implementation restriction: erased parameters are not supported in $context", span) + // erased parameter in type + val hasErasedParam = f match + case f: FunctionWithMods => f.hasErasedParams + case _ => false + if hasErasedParam then + fail(f.span) + // erased parameter in term + val hasErasedMods = f.args.collectFirst { + case v: ValDef if v.mods.is(Flags.Erased) => v + } + hasErasedMods match + case Some(param) => fail(param.span) + case _ => + /** CaptureRef ::= ident | `this` */ def captureRef(): Tree = - if in.token == THIS then simpleRef() else termIdent() + if in.token == THIS then simpleRef() + else termIdent() match + case Ident(nme.CAPTURE_ROOT) => captureRoot + case id => id - /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under -Ycc + /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking */ def captureSet(): List[Tree] = inBraces { if in.token == RBRACE then Nil else commaSeparated(captureRef) @@ -1449,12 +1479,12 @@ object Parsers { * | FunParamClause ‘=>>’ Type * | MatchType * | InfixType - * | CaptureSet Type -- under -Ycc + * | CaptureSet Type -- under captureChecking * FunType ::= (MonoFunType | PolyFunType) * MonoFunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - * | (‘->’ | ‘?->’ ) Type -- under -Ycc + * | (‘->’ | ‘?->’ ) Type -- under pureFunctions * PolyFunType ::= HKTypeParamClause '=>' Type - * | HKTypeParamClause ‘->’ Type -- under -Ycc + * | HKTypeParamClause ‘->’ Type -- under pureFunctions * FunTypeArgs ::= InfixType * | `(' [ [ ‘[using]’ ‘['erased'] FunArgType {`,' FunArgType } ] `)' * | '(' [ ‘[using]’ ‘['erased'] TypedFunParam {',' TypedFunParam } ')' @@ -1462,6 +1492,7 @@ object Parsers { def typ(): Tree = val start = in.offset var imods = Modifiers() + var erasedArgs: ListBuffer[Boolean] = ListBuffer() def functionRest(params: List[Tree]): Tree = val paramSpan = Span(start, in.lastOffset) atSpan(start, in.offset) { @@ -1474,8 +1505,9 @@ object Parsers { if !imods.flags.isEmpty || 
params.isEmpty then syntaxError(em"illegal parameter list for type lambda", start) token = ARROW - else if ctx.settings.Ycc.value then - // `=>` means impure function under -Ycc whereas `->` is a regular function. + else if Feature.pureFunsEnabled then + // `=>` means impure function under pureFunctions or captureChecking + // language imports, whereas `->` is then a regular function. imods |= Impure if token == CTXARROW then @@ -1492,10 +1524,10 @@ object Parsers { if isByNameType(tpt) then syntaxError(em"parameter of type lambda may not be call-by-name", tpt.span) TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], resultType) - else if imods.isOneOf(Given | Erased | Impure) then + else if imods.isOneOf(Given | Impure) || erasedArgs.contains(true) then if imods.is(Given) && params.isEmpty then - syntaxError("context function types require at least one parameter", paramSpan) - FunctionWithMods(params, resultType, imods) + syntaxError(em"context function types require at least one parameter", paramSpan) + FunctionWithMods(params, resultType, imods, erasedArgs.toList) else if !ctx.settings.YkindProjector.isDefault then val (newParams :+ newResultType, tparams) = replaceKindProjectorPlaceholders(params :+ resultType): @unchecked lambdaAbstract(tparams, Function(newParams, newResultType)) @@ -1513,17 +1545,30 @@ object Parsers { functionRest(Nil) } else { - if isErased then imods = addModifier(imods) val paramStart = in.offset + def addErased() = + erasedArgs.addOne(isErasedKw) + if isErasedKw then { in.skipToken(); } + addErased() val ts = in.currentRegion.withCommasExpected { funArgType() match case Ident(name) if name != tpnme.WILDCARD && in.isColon => isValParamList = true + def funParam(start: Offset, mods: Modifiers) = { + atSpan(start) { + addErased() + typedFunParam(in.offset, ident(), imods) + } + } commaSeparatedRest( typedFunParam(paramStart, name.toTermName, imods), - () => typedFunParam(in.offset, ident(), imods)) + () => funParam(in.offset, imods)) 
case t => - commaSeparatedRest(t, funArgType) + def funParam() = { + addErased() + funArgType() + } + commaSeparatedRest(t, funParam) } accept(RPAREN) if isValParamList || in.isArrow || isPureArrow then @@ -1554,11 +1599,13 @@ object Parsers { val arrowOffset = in.skipToken() val body = toplevelTyp() atSpan(start, arrowOffset) { - if (isFunction(body)) - PolyFunction(tparams, body) - else { - syntaxError("Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) - Ident(nme.ERROR.toTypeName) + getFunction(body) match { + case Some(f) => + checkFunctionNotErased(f, "poly function") + PolyFunction(tparams, body) + case None => + syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) + Ident(nme.ERROR.toTypeName) } } } @@ -1570,14 +1617,17 @@ object Parsers { else infixType() in.token match - case ARROW | CTXARROW => functionRest(t :: Nil) + case ARROW | CTXARROW => + erasedArgs.addOne(false) + functionRest(t :: Nil) case MATCH => matchType(t) case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t case _ => if isPureArrow then + erasedArgs.addOne(false) functionRest(t :: Nil) else - if (imods.is(Erased) && !t.isInstanceOf[FunctionWithMods]) + if (erasedArgs.contains(true) && !t.isInstanceOf[FunctionWithMods]) syntaxError(ErasedTypesCanOnlyBeFunctionTypes(), implicitKwPos(start)) t end typ @@ -1711,7 +1761,7 @@ object Parsers { val hint = if inPattern then "Use lower cased variable name without the `$` instead" else "To use a given Type[T] in a quote just write T directly" - syntaxError(s"$msg\n\nHint: $hint", Span(start, in.lastOffset)) + syntaxError(em"$msg\n\nHint: $hint", Span(start, in.lastOffset)) Ident(nme.ERROR.toTypeName) else Splice(expr) @@ -1793,7 +1843,7 @@ object Parsers { if (!ctx.settings.YkindProjector.isDefault) { def fail(): Tree = { syntaxError( - "λ requires a single argument of the form X => ... 
or (X, Y) => ...", + em"λ requires a single argument of the form X => ... or (X, Y) => ...", Span(startOffset(t), in.lastOffset) ) AppliedTypeTree(applied, args) @@ -1879,7 +1929,7 @@ object Parsers { if in.token == ARROW || isPureArrow(nme.PUREARROW) then val isImpure = in.token == ARROW val tp = atSpan(in.skipToken()) { ByNameTypeTree(core()) } - if isImpure && ctx.settings.Ycc.value then ImpureByNameTypeTree(tp) else tp + if isImpure && Feature.pureFunsEnabled then ImpureByNameTypeTree(tp) else tp else if in.token == LBRACE && followingIsCaptureSet() then val start = in.offset val cs = captureSet() @@ -1888,10 +1938,10 @@ object Parsers { val tp = paramTypeOf(core) val tp1 = tp match case ImpureByNameTypeTree(tp1) => - syntaxError("explicit captureSet is superfluous for impure call-by-name type", start) + syntaxError(em"explicit captureSet is superfluous for impure call-by-name type", start) tp1 case CapturingTypeTree(_, tp1: ByNameTypeTree) => - syntaxError("only one captureSet is allowed here", start) + syntaxError(em"only one captureSet is allowed here", start) tp1 case _: ByNameTypeTree if startTpOffset > endCsOffset => report.warning( @@ -1906,6 +1956,13 @@ object Parsers { else core() + private def maybeInto(tp: () => Tree) = + if in.isIdent(nme.into) + && in.featureEnabled(Feature.into) + && canStartTypeTokens.contains(in.lookahead.token) + then atSpan(in.skipToken()) { Into(tp()) } + else tp() + /** FunArgType ::= Type * | `=>' Type * | [CaptureSet] `->' Type @@ -1918,10 +1975,10 @@ object Parsers { */ def paramType(): Tree = paramTypeOf(paramValueType) - /** ParamValueType ::= Type [`*'] + /** ParamValueType ::= [`into`] Type [`*'] */ def paramValueType(): Tree = { - val t = toplevelTyp() + val t = maybeInto(toplevelTyp) if (isIdent(nme.raw.STAR)) { in.nextToken() atSpan(startOffset(t)) { PostfixOp(t, Ident(tpnme.raw.STAR)) } @@ -1967,7 +2024,7 @@ object Parsers { } :: contextBounds(pname) else if in.token == VIEWBOUND then 
report.errorOrMigrationWarning( - "view bounds `<%' are no longer supported, use a context bound `:' instead", + em"view bounds `<%' are no longer supported, use a context bound `:' instead", in.sourcePos(), from = `3.0`) atSpan(in.skipToken()) { Function(Ident(pname) :: Nil, toplevelTyp()) @@ -2068,24 +2125,22 @@ object Parsers { def expr(location: Location): Tree = { val start = in.offset - def isSpecialClosureStart = in.lookahead.isIdent(nme.erased) && in.erasedEnabled in.token match case IMPLICIT => closure(start, location, modifiers(BitSet(IMPLICIT))) - case LPAREN if isSpecialClosureStart => - closure(start, location, Modifiers()) case LBRACKET => val start = in.offset val tparams = typeParamClause(ParamOwner.TypeParam) val arrowOffset = accept(ARROW) val body = expr(location) atSpan(start, arrowOffset) { - if (isFunction(body)) - PolyFunction(tparams, body) - else { - syntaxError("Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) - errorTermTree(arrowOffset) - } + getFunction(body) match + case Some(f) => + checkFunctionNotErased(f, "poly function") + PolyFunction(tparams, f) + case None => + syntaxError(em"Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) + errorTermTree(arrowOffset) } case _ => val saved = placeholderParams @@ -2103,7 +2158,9 @@ object Parsers { else if isWildcard(t) then placeholderParams = placeholderParams ::: saved t - else wrapPlaceholders(t) + else + checkNonParamTuple(t) + wrapPlaceholders(t) } def expr1(location: Location = Location.ElseWhere): Tree = in.token match @@ -2118,8 +2175,8 @@ object Parsers { } case DO => report.errorOrMigrationWarning( - i"""`do while ` is no longer supported, - |use `while ; do ()` instead.${rewriteNotice()}""", + em"""`do while ` is no longer supported, + |use `while ; do ()` instead.${rewriteNotice()}""", in.sourcePos(), from = `3.0`) val start = in.skipToken() atSpan(start) { @@ -2171,10 +2228,11 
@@ object Parsers { else Literal(Constant(())) // finally without an expression } else { - if (handler.isEmpty) warning( - EmptyCatchAndFinallyBlock(body), - source.atSpan(Span(tryOffset, endOffset(body))) - ) + if handler.isEmpty then + report.warning( + EmptyCatchAndFinallyBlock(body), + source.atSpan(Span(tryOffset, endOffset(body))) + ) EmptyTree } ParsedTry(body, handler, finalizer) @@ -2294,10 +2352,8 @@ object Parsers { if in.token == RPAREN then Nil else - var mods1 = mods - if isErased then mods1 = addModifier(mods1) try - commaSeparated(() => binding(mods1)) + commaSeparated(() => binding(mods)) finally accept(RPAREN) else { @@ -2306,7 +2362,7 @@ object Parsers { val t = if ((in.token == COLONop || in.token == COLONfollow) && location == Location.InBlock) { report.errorOrMigrationWarning( - s"This syntax is no longer supported; parameter needs to be enclosed in (...)${rewriteNotice(`future-migration`)}", + em"This syntax is no longer supported; parameter needs to be enclosed in (...)${rewriteNotice(`future-migration`)}", source.atSpan(Span(start, in.lastOffset)), from = future) in.nextToken() @@ -2321,10 +2377,13 @@ object Parsers { (atSpan(start) { makeParameter(name, t, mods) }) :: Nil } - /** Binding ::= (id | `_') [`:' Type] + /** Binding ::= [`erased`] (id | `_') [`:' Type] */ def binding(mods: Modifiers): Tree = - atSpan(in.offset) { makeParameter(bindingName(), typedOpt(), mods) } + atSpan(in.offset) { + val mods1 = if isErasedKw then addModifier(mods) else mods + makeParameter(bindingName(), typedOpt(), mods1) + } def bindingName(): TermName = if (in.token == USCORE) { @@ -2343,7 +2402,7 @@ object Parsers { atSpan(start, in.offset) { if in.token == CTXARROW then if params.isEmpty then - syntaxError("context function literals require at least one formal parameter", Span(start, in.lastOffset)) + syntaxError(em"context function literals require at least one formal parameter", Span(start, in.lastOffset)) in.nextToken() else accept(ARROW) @@ -2357,7 
+2416,7 @@ object Parsers { /** PostfixExpr ::= InfixExpr [id [nl]] * InfixExpr ::= PrefixExpr * | InfixExpr id [nl] InfixExpr - * | InfixExpr id `:` IndentedExpr + * | InfixExpr id ColonArgument * | InfixExpr MatchClause */ def postfixExpr(location: Location = Location.ElseWhere): Tree = @@ -2401,10 +2460,11 @@ object Parsers { * | SimpleExpr `.` MatchClause * | SimpleExpr (TypeArgs | NamedTypeArgs) * | SimpleExpr1 ArgumentExprs - * | SimpleExpr1 `:` ColonArgument -- under language.experimental.fewerBraces - * ColonArgument ::= indent (CaseClauses | Block) outdent - * | FunParams (‘=>’ | ‘?=>’) ColonArgBody - * | HkTypeParamClause ‘=>’ ColonArgBody + * | SimpleExpr1 ColonArgument + * ColonArgument ::= colon [LambdaStart] + * indent (CaseClauses | Block) outdent + * LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + * | HkTypeParamClause ‘=>’ * ColonArgBody ::= indent (CaseClauses | Block) outdent * Quoted ::= ‘'’ ‘{’ Block ‘}’ * | ‘'’ ‘[’ Type ‘]’ @@ -2520,6 +2580,7 @@ object Parsers { else in.currentRegion.withCommasExpected { var isFormalParams = false def exprOrBinding() = + if isErasedKw then isFormalParams = true if isFormalParams then binding(Modifiers()) else val t = exprInParens() @@ -2765,10 +2826,10 @@ object Parsers { CaseDef(pat, grd, atSpan(accept(ARROW)) { if exprOnly then if in.indentSyntax && in.isAfterLineEnd && in.token != INDENT then - warning(i"""Misleading indentation: this expression forms part of the preceding catch case. - |If this is intended, it should be indented for clarity. - |Otherwise, if the handler is intended to be empty, use a multi-line catch with - |an indented case.""") + warning(em"""Misleading indentation: this expression forms part of the preceding catch case. + |If this is intended, it should be indented for clarity. 
+ |Otherwise, if the handler is intended to be empty, use a multi-line catch with + |an indented case.""") expr() else block() }) @@ -2809,11 +2870,14 @@ object Parsers { if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1(location) :: patternAlts(location) } else Nil - /** Pattern1 ::= Pattern2 [Ascription] + /** Pattern1 ::= PatVar Ascription + * | [‘-’] integerLiteral Ascription + * | [‘-’] floatingPointLiteral Ascription + * | Pattern2 */ def pattern1(location: Location = Location.InPattern): Tree = val p = pattern2() - if in.isColon then + if (isVarPattern(p) || p.isInstanceOf[Number]) && in.isColon then in.nextToken() ascription(p, location) else p @@ -2989,7 +3053,8 @@ object Parsers { inBrackets { if in.token == THIS then if sourceVersion.isAtLeast(future) then - deprecationWarning("The [this] qualifier will be deprecated in the future; it should be dropped.") + deprecationWarning( + em"The [this] qualifier will be deprecated in the future; it should be dropped.") in.nextToken() mods | Local else mods.withPrivateWithin(ident().toTypeName) @@ -3063,6 +3128,42 @@ object Parsers { /* -------- PARAMETERS ------------------------------------------- */ + /** DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent + * DefParamClause ::= DefTypeParamClause + * | DefTermParamClause + * | UsingParamClause + */ + def typeOrTermParamClauses( + ownerKind: ParamOwner, + numLeadParams: Int = 0 + ): List[List[TypeDef] | List[ValDef]] = + + def recur(firstClause: Boolean, numLeadParams: Int, prevIsTypeClause: Boolean): List[List[TypeDef] | List[ValDef]] = + newLineOptWhenFollowedBy(LPAREN) + newLineOptWhenFollowedBy(LBRACKET) + if in.token == LPAREN then + val paramsStart = in.offset + val params = termParamClause( + numLeadParams, + firstClause = firstClause) + val lastClause = params.nonEmpty && params.head.mods.flags.is(Implicit) + params :: ( + if lastClause then Nil + else recur(firstClause = false, numLeadParams + 
params.length, prevIsTypeClause = false)) + else if in.token == LBRACKET then + if prevIsTypeClause then + syntaxError( + em"Type parameter lists must be separated by a term or using parameter list", + in.offset + ) + typeParamClause(ownerKind) :: recur(firstClause, numLeadParams, prevIsTypeClause = true) + else Nil + end recur + + recur(firstClause = true, numLeadParams = numLeadParams, prevIsTypeClause = false) + end typeOrTermParamClauses + + /** ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ * ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] * id [HkTypeParamClause] TypeParamBounds @@ -3080,7 +3181,7 @@ object Parsers { def variance(vflag: FlagSet): FlagSet = if ownerKind == ParamOwner.Def || ownerKind == ParamOwner.TypeParam then - syntaxError(i"no `+/-` variance annotation allowed here") + syntaxError(em"no `+/-` variance annotation allowed here") in.nextToken() EmptyFlags else @@ -3116,34 +3217,39 @@ object Parsers { /** ContextTypes ::= FunArgType {‘,’ FunArgType} */ - def contextTypes(ofClass: Boolean, nparams: Int, impliedMods: Modifiers): List[ValDef] = + def contextTypes(ofClass: Boolean, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = val tps = commaSeparated(funArgType) - var counter = nparams + var counter = numLeadParams def nextIdx = { counter += 1; counter } val paramFlags = if ofClass then LocalParamAccessor else Param tps.map(makeSyntheticParameter(nextIdx, _, paramFlags | Synthetic | impliedMods.flags)) - /** ClsParamClause ::= ‘(’ [‘erased’] ClsParams ‘)’ | UsingClsParamClause - * UsingClsParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ + /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause + * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} * ClsParam ::= {Annotation} * - * DefParamClause ::= ‘(’ [‘erased’] DefParams ‘)’ | UsingParamClause - * UsingParamClause ::= ‘(’ ‘using’ [‘erased’] (DefParams | ContextTypes) 
‘)’ - * DefParams ::= DefParam {‘,’ DefParam} - * DefParam ::= {Annotation} [‘inline’] Param + * TypelessClause ::= DefTermParamClause + * | UsingParamClause + * + * DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ + * UsingParamClause ::= ‘(’ ‘using’ (DefTermParams | ContextTypes) ‘)’ + * DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + * DefTermParams ::= DefTermParam {‘,’ DefTermParam} + * DefTermParam ::= {Annotation} [‘erased’] [‘inline’] Param * * Param ::= id `:' ParamType [`=' Expr] * * @return the list of parameter definitions */ - def paramClause(nparams: Int, // number of parameters preceding this clause - ofClass: Boolean = false, // owner is a class - ofCaseClass: Boolean = false, // owner is a case class - prefix: Boolean = false, // clause precedes name of an extension method - givenOnly: Boolean = false, // only given parameters allowed - firstClause: Boolean = false // clause is the first in regular list of clauses - ): List[ValDef] = { + def termParamClause( + numLeadParams: Int, // number of parameters preceding this clause + ofClass: Boolean = false, // owner is a class + ofCaseClass: Boolean = false, // owner is a case class + prefix: Boolean = false, // clause precedes name of an extension method + givenOnly: Boolean = false, // only given parameters allowed + firstClause: Boolean = false // clause is the first in regular list of clauses + ): List[ValDef] = { var impliedMods: Modifiers = EmptyModifiers def addParamMod(mod: () => Mod) = impliedMods = addMod(impliedMods, atSpan(in.skipToken()) { mod() }) @@ -3154,12 +3260,12 @@ object Parsers { else if isIdent(nme.using) then addParamMod(() => Mod.Given()) - if isErased then - addParamMod(() => Mod.Erased()) def param(): ValDef = { val start = in.offset var mods = impliedMods.withAnnotations(annotations()) + if isErasedKw then + mods = addModifier(mods) if (ofClass) { mods = addFlag(modifiers(start = mods), ParamAccessor) mods = @@ -3170,8 +3276,8 @@ object Parsers { val mod = 
atSpan(in.skipToken()) { Mod.Var() } addMod(mods, mod) else - if (!(mods.flags &~ (ParamAccessor | Inline | impliedMods.flags)).isEmpty) - syntaxError("`val` or `var` expected") + if (!(mods.flags &~ (ParamAccessor | Inline | Erased | impliedMods.flags)).isEmpty) + syntaxError(em"`val` or `var` expected") if (firstClause && ofCaseClass) mods else mods | PrivateLocal } @@ -3208,7 +3314,7 @@ object Parsers { checkVarArgsRules(rest) } - // begin paramClause + // begin termParamClause inParens { if in.token == RPAREN && !prefix && !impliedMods.is(Given) then Nil else @@ -3217,34 +3323,46 @@ object Parsers { else paramMods() if givenOnly && !impliedMods.is(Given) then - syntaxError("`using` expected") - val isParams = - !impliedMods.is(Given) - || startParamTokens.contains(in.token) - || isIdent && (in.name == nme.inline || in.lookahead.isColon) - if isParams then commaSeparated(() => param()) - else contextTypes(ofClass, nparams, impliedMods) + syntaxError(em"`using` expected") + val (firstParamMod, isParams) = + var mods = EmptyModifiers + if in.lookahead.isColon then + (mods, true) + else + if isErased then mods = addModifier(mods) + val isParams = + !impliedMods.is(Given) + || startParamTokens.contains(in.token) + || isIdent && (in.name == nme.inline || in.lookahead.isColon) + (mods, isParams) + (if isParams then commaSeparated(() => param()) + else contextTypes(ofClass, numLeadParams, impliedMods)) match { + case Nil => Nil + case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t + } checkVarArgsRules(clause) clause } } - /** ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] - * DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] + /** ClsTermParamClauses ::= {ClsTermParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] + * TypelessClauses ::= TypelessClause {TypelessClause} * * @return The parameter definitions */ - def paramClauses(ofClass: Boolean = false, - ofCaseClass: Boolean = false, - givenOnly: Boolean = 
false, - numLeadParams: Int = 0): List[List[ValDef]] = + def termParamClauses( + ofClass: Boolean = false, + ofCaseClass: Boolean = false, + givenOnly: Boolean = false, + numLeadParams: Int = 0 + ): List[List[ValDef]] = - def recur(firstClause: Boolean, nparams: Int): List[List[ValDef]] = + def recur(firstClause: Boolean, numLeadParams: Int): List[List[ValDef]] = newLineOptWhenFollowedBy(LPAREN) if in.token == LPAREN then val paramsStart = in.offset - val params = paramClause( - nparams, + val params = termParamClause( + numLeadParams, ofClass = ofClass, ofCaseClass = ofCaseClass, givenOnly = givenOnly, @@ -3252,12 +3370,12 @@ object Parsers { val lastClause = params.nonEmpty && params.head.mods.flags.is(Implicit) params :: ( if lastClause then Nil - else recur(firstClause = false, nparams + params.length)) + else recur(firstClause = false, numLeadParams + params.length)) else Nil end recur recur(firstClause = true, numLeadParams) - end paramClauses + end termParamClauses /* -------- DEFS ------------------------------------------- */ @@ -3294,25 +3412,25 @@ object Parsers { languageImport(tree) match case Some(prefix) => in.languageImportContext = in.languageImportContext.importContext(imp, NoSymbol) - for - case ImportSelector(id @ Ident(imported), EmptyTree, _) <- selectors - if allSourceVersionNames.contains(imported) - do - if !outermost then - syntaxError(i"source version import is only allowed at the toplevel", id.span) - else if ctx.compilationUnit.sourceVersion.isDefined then - syntaxError(i"duplicate source version import", id.span) - else if illegalSourceVersionNames.contains(imported) then - val candidate = - val nonMigration = imported.toString.replace("-migration", "") - validSourceVersionNames.find(_.show == nonMigration) - val baseMsg = i"`$imported` is not a valid source version" - val msg = candidate match - case Some(member) => i"$baseMsg, did you mean language.`$member`?" 
- case _ => baseMsg - syntaxError(msg, id.span) - else - ctx.compilationUnit.sourceVersion = Some(SourceVersion.valueOf(imported.toString)) + for case ImportSelector(id @ Ident(imported), EmptyTree, _) <- selectors do + if Feature.handleGlobalLanguageImport(prefix, imported) && !outermost then + syntaxError(em"this language import is only allowed at the toplevel", id.span) + if allSourceVersionNames.contains(imported) && prefix.isEmpty then + if !outermost then + syntaxError(em"source version import is only allowed at the toplevel", id.span) + else if ctx.compilationUnit.sourceVersion.isDefined then + syntaxError(em"duplicate source version import", id.span) + else if illegalSourceVersionNames.contains(imported) then + val candidate = + val nonMigration = imported.toString.replace("-migration", "") + validSourceVersionNames.find(_.show == nonMigration) + val baseMsg = em"`$imported` is not a valid source version" + val msg = candidate match + case Some(member) => baseMsg.append(i", did you mean language.`$member`?") + case _ => baseMsg + syntaxError(msg, id.span) + else + ctx.compilationUnit.sourceVersion = Some(SourceVersion.valueOf(imported.toString)) case None => imp @@ -3371,7 +3489,7 @@ object Parsers { case _ => if isIdent(nme.raw.STAR) then wildcardSelector() else - if !idOK then syntaxError(i"named imports cannot follow wildcard imports") + if !idOK then syntaxError(em"named imports cannot follow wildcard imports") namedSelector(termIdent()) } @@ -3471,7 +3589,8 @@ object Parsers { if sourceVersion.isAtLeast(future) then deprecationWarning( em"""`= _` has been deprecated; use `= uninitialized` instead. 
- |`uninitialized` can be imported with `scala.compiletime.uninitialized`.""", rhsOffset) + |`uninitialized` can be imported with `scala.compiletime.uninitialized`.""", + rhsOffset) placeholderParams = placeholderParams.tail atSpan(rhs0.span) { Ident(nme.WILDCARD) } case rhs0 => rhs0 @@ -3497,11 +3616,15 @@ object Parsers { } } + + /** DefDef ::= DefSig [‘:’ Type] ‘=’ Expr - * | this ParamClause ParamClauses `=' ConstrExpr + * | this TypelessClauses [DefImplicitClause] `=' ConstrExpr * DefDcl ::= DefSig `:' Type - * DefSig ::= id [DefTypeParamClause] DefParamClauses - * | ExtParamClause [nl] [‘.’] id DefParamClauses + * DefSig ::= id [DefTypeParamClause] DefTermParamClauses + * + * if clauseInterleaving is enabled: + * DefSig ::= id [DefParamClauses] [DefImplicitClause] */ def defDefOrDcl(start: Offset, mods: Modifiers, numLeadParams: Int = 0): DefDef = atSpan(start, nameStart) { @@ -3511,7 +3634,7 @@ object Parsers { else ": Unit " // trailing space ensures that `def f()def g()` works. 
if migrateTo3 then report.errorOrMigrationWarning( - s"Procedure syntax no longer supported; `$toInsert` should be inserted here", + em"Procedure syntax no longer supported; `$toInsert` should be inserted here", in.sourcePos(), from = `3.0`) patch(source, Span(in.lastOffset), toInsert) true @@ -3520,10 +3643,10 @@ object Parsers { if (in.token == THIS) { in.nextToken() - val vparamss = paramClauses(numLeadParams = numLeadParams) + val vparamss = termParamClauses(numLeadParams = numLeadParams) if (vparamss.isEmpty || vparamss.head.take(1).exists(_.mods.isOneOf(GivenOrImplicit))) in.token match { - case LBRACKET => syntaxError("no type parameters allowed here") + case LBRACKET => syntaxError(em"no type parameters allowed here") case EOF => incompleteInputError(AuxConstructorNeedsNonImplicitParameter()) case _ => syntaxError(AuxConstructorNeedsNonImplicitParameter(), nameStart) } @@ -3538,9 +3661,18 @@ object Parsers { val mods1 = addFlag(mods, Method) val ident = termIdent() var name = ident.name.asTermName - val tparams = typeParamClauseOpt(ParamOwner.Def) - val vparamss = paramClauses(numLeadParams = numLeadParams) + val paramss = + if in.featureEnabled(Feature.clauseInterleaving) then + // If you are making interleaving stable manually, please refer to the PR introducing it instead, section "How to make non-experimental" + typeOrTermParamClauses(ParamOwner.Def, numLeadParams = numLeadParams) + else + val tparams = typeParamClauseOpt(ParamOwner.Def) + val vparamss = termParamClauses(numLeadParams = numLeadParams) + + joinParams(tparams, vparamss) + var tpt = fromWithinReturnType { typedOpt() } + if (migrateTo3) newLineOptWhenFollowedBy(LBRACE) val rhs = if in.token == EQUALS then @@ -3557,7 +3689,7 @@ object Parsers { accept(EQUALS) expr() - val ddef = DefDef(name, joinParams(tparams, vparamss), tpt, rhs) + val ddef = DefDef(name, paramss, tpt, rhs) if (isBackquoted(ident)) ddef.pushAttachment(Backquoted, ()) finalizeDef(ddef, mods1, start) } @@ -3616,13 +3748,13 
@@ object Parsers { case TypeBoundsTree(EmptyTree, upper, _) => rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) case _ => - syntaxError(i"cannot combine lower bound and match type alias", eqOffset) + syntaxError(em"cannot combine lower bound and match type alias", eqOffset) } case _ => if mods.is(Opaque) then rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) else - syntaxError(i"cannot combine bound and alias", eqOffset) + syntaxError(em"cannot combine bound and alias", eqOffset) } makeTypeDef(rhs) } @@ -3678,12 +3810,12 @@ object Parsers { val templ = templateOpt(constr) finalizeDef(TypeDef(name, templ), mods, start) - /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses + /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsTermParamClauses */ def classConstr(isCaseClass: Boolean = false): DefDef = atSpan(in.lastOffset) { val tparams = typeParamClauseOpt(ParamOwner.Class) val cmods = fromWithinClassConstr(constrModsOpt()) - val vparamss = paramClauses(ofClass = true, ofCaseClass = isCaseClass) + val vparamss = termParamClauses(ofClass = true, ofCaseClass = isCaseClass) makeConstructor(tparams, vparamss).withMods(cmods) } @@ -3703,7 +3835,7 @@ object Parsers { private def checkAccessOnly(mods: Modifiers, where: String): Modifiers = val mods1 = mods & (AccessFlags | Enum) if mods1 ne mods then - syntaxError(s"Only access modifiers are allowed on enum $where") + syntaxError(em"Only access modifiers are allowed on enum $where") mods1 /** EnumDef ::= id ClassConstr InheritClauses EnumBody @@ -3759,17 +3891,17 @@ object Parsers { vparamss: List[List[Tree]], stat: Tree): Unit = stat match { case stat: DefDef => if stat.mods.is(ExtensionMethod) && vparamss.nonEmpty then - syntaxError(i"no extension method allowed here since leading parameter was already given", stat.span) + syntaxError(em"no extension method allowed here since leading parameter was already given", stat.span) else if !stat.mods.is(ExtensionMethod) && vparamss.isEmpty then - 
syntaxError(i"an extension method is required here", stat.span) + syntaxError(em"an extension method is required here", stat.span) else if tparams.nonEmpty && stat.leadingTypeParams.nonEmpty then - syntaxError(i"extension method cannot have type parameters since some were already given previously", + syntaxError(em"extension method cannot have type parameters since some were already given previously", stat.leadingTypeParams.head.span) else if stat.rhs.isEmpty then - syntaxError(i"extension method cannot be abstract", stat.span) + syntaxError(em"extension method cannot be abstract", stat.span) case EmptyTree => case stat => - syntaxError(i"extension clause can only define methods", stat.span) + syntaxError(em"extension clause can only define methods", stat.span) } /** GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) @@ -3785,14 +3917,14 @@ object Parsers { newLineOpt() val vparamss = if in.token == LPAREN && in.lookahead.isIdent(nme.using) - then paramClauses(givenOnly = true) + then termParamClauses(givenOnly = true) else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty if !(name.isEmpty && noParams) then acceptColon() val parents = if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else constrApp() :: withConstrApps() + else refinedTypeRest(constrApp()) :: withConstrApps() val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) @@ -3820,33 +3952,33 @@ object Parsers { finalizeDef(gdef, mods1, start) } - /** Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefParam ‘)’ + /** Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ * {UsingParamClause} ExtMethods */ def extension(): ExtMethods = val start = in.skipToken() val tparams = typeParamClauseOpt(ParamOwner.Def) val leadParamss = ListBuffer[List[ValDef]]() - def nparams = leadParamss.map(_.length).sum + def numLeadParams = 
leadParamss.map(_.length).sum while - val extParams = paramClause(nparams, prefix = true) + val extParams = termParamClause(numLeadParams, prefix = true) leadParamss += extParams isUsingClause(extParams) do () - leadParamss ++= paramClauses(givenOnly = true, numLeadParams = nparams) + leadParamss ++= termParamClauses(givenOnly = true, numLeadParams = numLeadParams) if in.isColon then - syntaxError("no `:` expected here") + syntaxError(em"no `:` expected here") in.nextToken() val methods: List[Tree] = if in.token == EXPORT then exportClause() else if isDefIntro(modifierTokens) then - extMethod(nparams) :: Nil + extMethod(numLeadParams) :: Nil else in.observeIndented() newLineOptWhenFollowedBy(LBRACE) - if in.isNestedStart then inDefScopeBraces(extMethods(nparams)) - else { syntaxErrorOrIncomplete("Extension without extension methods") ; Nil } + if in.isNestedStart then inDefScopeBraces(extMethods(numLeadParams)) + else { syntaxErrorOrIncomplete(em"Extension without extension methods") ; Nil } val result = atSpan(start)(ExtMethods(joinParams(tparams, leadParamss.toList), methods)) val comment = in.getDocComment(start) if comment.isDefined then @@ -3879,7 +4011,7 @@ object Parsers { meths += defDefOrDcl(start, mods, numLeadParams) in.token != EOF && statSepOrEnd(meths, what = "extension method") do () - if meths.isEmpty then syntaxErrorOrIncomplete("`def` expected") + if meths.isEmpty then syntaxErrorOrIncomplete(em"`def` expected") meths.toList } @@ -3925,7 +4057,7 @@ object Parsers { in.nextToken() if (in.token == LBRACE || in.token == COLONeol) { report.errorOrMigrationWarning( - "`extends` must be followed by at least one parent", + em"`extends` must be followed by at least one parent", in.sourcePos(), from = `3.0`) Nil } @@ -4067,7 +4199,7 @@ object Parsers { in.token = SELFARROW // suppresses INDENT insertion after `=>` in.nextToken() else - syntaxError("`=>` expected after self type") + syntaxError(em"`=>` expected after self type") makeSelfDef(selfName, 
selfTpt) } else EmptyValDef @@ -4114,24 +4246,26 @@ object Parsers { def refineStatSeq(): List[Tree] = { val stats = new ListBuffer[Tree] def checkLegal(tree: Tree): List[Tree] = - val problem = tree match + def ok = tree :: Nil + def fail(msg: Message) = + syntaxError(msg, tree.span) + Nil + tree match case tree: ValDef if tree.mods.is(Mutable) => - i"""refinement cannot be a mutable var. - |You can use an explicit getter ${tree.name} and setter ${tree.name}_= instead""" + fail(em"""refinement cannot be a mutable var. + |You can use an explicit getter ${tree.name} and setter ${tree.name}_= instead""") case tree: MemberDef if !(tree.mods.flags & ModifierFlags).isEmpty => - i"refinement cannot be ${(tree.mods.flags & ModifierFlags).flagStrings().mkString("`", "`, `", "`")}" + fail(em"refinement cannot be ${(tree.mods.flags & ModifierFlags).flagStrings().mkString("`", "`, `", "`")}") case tree: DefDef if tree.termParamss.nestedExists(!_.rhs.isEmpty) => - i"refinement cannot have default arguments" + fail(em"refinement cannot have default arguments") case tree: ValOrDefDef => - if tree.rhs.isEmpty then "" - else "refinement cannot have a right-hand side" + if tree.rhs.isEmpty then ok + else fail(em"refinement cannot have a right-hand side") case tree: TypeDef => - if !tree.isClassDef then "" - else "refinement cannot be a class or trait" + if !tree.isClassDef then ok + else fail(em"refinement cannot be a class or trait") case _ => - "this kind of definition cannot be a refinement" - if problem.isEmpty then tree :: Nil - else { syntaxError(problem, tree.span); Nil } + fail(em"this kind of definition cannot be a refinement") while val dclFound = isDclIntro diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 082112d800d9..b3d824a2efd2 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -17,9 +17,11 @@ import 
scala.collection.mutable import scala.collection.immutable.SortedMap import rewrites.Rewrites.patch import config.Feature -import config.Feature.migrateTo3 +import config.Feature.{migrateTo3, fewerBracesEnabled} import config.SourceVersion.`3.0` -import reporting.{NoProfile, Profile} +import reporting.{NoProfile, Profile, Message} + +import java.util.Objects object Scanners { @@ -100,18 +102,22 @@ object Scanners { */ var errOffset: Offset = NoOffset + /** Implements CharArrayReader's error method */ + protected def error(msg: String, off: Offset): Unit = + error(msg.toMessage, off) + /** Generate an error at the given offset */ - def error(msg: String, off: Offset = offset): Unit = { + def error(msg: Message, off: Offset = offset): Unit = { errorButContinue(msg, off) token = ERROR errOffset = off } - def errorButContinue(msg: String, off: Offset = offset): Unit = + def errorButContinue(msg: Message, off: Offset = offset): Unit = report.error(msg, sourcePos(off)) /** signal an error where the input ended in the middle of a token */ - def incompleteInputError(msg: String): Unit = { + def incompleteInputError(msg: Message): Unit = { report.incompleteInputError(msg, sourcePos()) token = EOF errOffset = offset @@ -122,9 +128,11 @@ object Scanners { // Setting token data ---------------------------------------------------- + protected def initialCharBufferSize = 1024 + /** A character buffer for literals */ - protected val litBuf = CharBuffer() + protected val litBuf = CharBuffer(initialCharBufferSize) /** append Unicode character to "litBuf" buffer */ @@ -159,7 +167,7 @@ object Scanners { // disallow trailing numeric separator char, but continue lexing def checkNoTrailingSeparator(): Unit = if (!litBuf.isEmpty && isNumberSeparator(litBuf.last)) - errorButContinue("trailing separator is not allowed", offset + litBuf.length - 1) + errorButContinue(em"trailing separator is not allowed", offset + litBuf.length - 1) } class Scanner(source: SourceFile, override val 
startFrom: Offset = 0, profile: Profile = NoProfile, allowIndent: Boolean = true)(using Context) extends ScannerCommon(source) { @@ -192,7 +200,7 @@ object Scanners { val rewriteTargets = List(s.newSyntax, s.oldSyntax, s.indent, s.noindent) val enabled = rewriteTargets.filter(_.value) if (enabled.length > 1) - error(s"illegal combination of -rewrite targets: ${enabled(0).name} and ${enabled(1).name}") + error(em"illegal combination of -rewrite targets: ${enabled(0).name} and ${enabled(1).name}") } private var myLanguageImportContext: Context = ctx @@ -202,25 +210,6 @@ object Scanners { def featureEnabled(name: TermName) = Feature.enabled(name)(using languageImportContext) def erasedEnabled = featureEnabled(Feature.erasedDefinitions) - private inline val fewerBracesByDefault = false - // turn on to study impact on codebase if `fewerBraces` was the default - - private var fewerBracesEnabledCache = false - private var fewerBracesEnabledCtx: Context = NoContext - - def fewerBracesEnabled = - if fewerBracesEnabledCtx ne myLanguageImportContext then - fewerBracesEnabledCache = - featureEnabled(Feature.fewerBraces) - || fewerBracesByDefault && indentSyntax && !migrateTo3 - // ensure that fewer braces is not the default for 3.0-migration since - // { x: T => - // expr - // } - // would be ambiguous - fewerBracesEnabledCtx = myLanguageImportContext - fewerBracesEnabledCache - private var postfixOpsEnabledCache = false private var postfixOpsEnabledCtx: Context = NoContext @@ -257,14 +246,14 @@ object Scanners { def getDocComment(pos: Int): Option[Comment] = docstringMap.get(pos) /** A buffer for comments */ - private val commentBuf = CharBuffer() + private val commentBuf = CharBuffer(initialCharBufferSize) def toToken(identifier: SimpleName): Token = def handleMigration(keyword: Token): Token = if scala3keywords.contains(keyword) && migrateTo3 then val what = tokenString(keyword) report.errorOrMigrationWarning( - i"$what is now a keyword, write `$what` instead of $what to 
keep it as an identifier", + em"$what is now a keyword, write `$what` instead of $what to keep it as an identifier", sourcePos(), from = `3.0`) patch(source, Span(offset), "`") @@ -566,7 +555,7 @@ object Scanners { // If nextWidth is an indentation level not yet seen by enclosing indentation // region, invoke `handler`. - def handleNewIndentWidth(r: Region, handler: Indented => Unit): Unit = r match + inline def handleNewIndentWidth(r: Region, inline handler: Indented => Unit): Unit = r match case r @ Indented(curWidth, prefix, outer) if curWidth < nextWidth && !r.otherIndentWidths.contains(nextWidth) && nextWidth != lastWidth => handler(r) @@ -584,7 +573,7 @@ object Scanners { * they start with `(`, `[` or `{`, or the last statement ends in a `return`. * The Scala 2 rules apply under source `3.0-migration` or under `-no-indent`. */ - def isContinuing = + inline def isContinuing = lastWidth < nextWidth && (openParensTokens.contains(token) || lastToken == RETURN) && !pastBlankLine @@ -621,10 +610,11 @@ object Scanners { case r: Indented => insert(OUTDENT, offset) handleNewIndentWidth(r.enclosing, ir => + val lw = lastWidth errorButContinue( - i"""The start of this line does not match any of the previous indentation widths. - |Indentation width of current line : $nextWidth - |This falls between previous widths: ${ir.width} and $lastWidth""")) + em"""The start of this line does not match any of the previous indentation widths. 
+ |Indentation width of current line : $nextWidth + |This falls between previous widths: ${ir.width} and $lw""")) case r => if skipping then if r.enclosing.isClosedByUndentAt(nextWidth) then @@ -640,16 +630,17 @@ object Scanners { else if lastToken == SELFARROW then currentRegion.knownWidth = nextWidth else if (lastWidth != nextWidth) - errorButContinue(spaceTabMismatchMsg(lastWidth, nextWidth)) + val lw = lastWidth + errorButContinue(spaceTabMismatchMsg(lw, nextWidth)) if token != OUTDENT then handleNewIndentWidth(currentRegion, _.otherIndentWidths += nextWidth) if next.token == EMPTY then profile.recordNewLine() end handleNewLine - def spaceTabMismatchMsg(lastWidth: IndentWidth, nextWidth: IndentWidth) = - i"""Incompatible combinations of tabs and spaces in indentation prefixes. - |Previous indent : $lastWidth + def spaceTabMismatchMsg(lastWidth: IndentWidth, nextWidth: IndentWidth): Message = + em"""Incompatible combinations of tabs and spaces in indentation prefixes. + |Previous indent : $lastWidth |Latest indent : $nextWidth""" def observeColonEOL(inTemplate: Boolean): Unit = @@ -792,22 +783,24 @@ object Scanners { private def isSupplementary(high: Char, test: Int => Boolean, strict: Boolean = true): Boolean = isHighSurrogate(high) && { var res = false - nextChar() - val low = ch + val low = lookaheadChar() if isLowSurrogate(low) then - nextChar() val codepoint = toCodePoint(high, low) - if isValidCodePoint(codepoint) && test(codepoint) then - putChar(high) - putChar(low) - res = true + if isValidCodePoint(codepoint) then + if test(codepoint) then + putChar(high) + putChar(low) + nextChar() + nextChar() + res = true else - error(s"illegal character '${toUnicode(high)}${toUnicode(low)}'") + error(em"illegal character '${toUnicode(high)}${toUnicode(low)}'") else if !strict then putChar(high) + nextChar() res = true else - error(s"illegal character '${toUnicode(high)}' missing low surrogate") + error(em"illegal character '${toUnicode(high)}' missing low 
surrogate") res } private def atSupplementary(ch: Char, f: Int => Boolean): Boolean = @@ -884,7 +877,7 @@ object Scanners { case _ => base = 10 ; putChar('0') } if (base != 10 && !isNumberSeparator(ch) && digit2int(ch, base) < 0) - error("invalid literal number") + error(em"invalid literal number") } fetchLeadingZero() getNumber() @@ -904,7 +897,6 @@ object Scanners { if (ch == '\"') { if (lookaheadChar() == '\"') { nextRawChar() - //offset += 3 // first part is positioned at the quote nextRawChar() stringPart(multiLine = true) } @@ -915,7 +907,6 @@ object Scanners { } } else { - //offset += 1 // first part is positioned at the quote stringPart(multiLine = false) } } @@ -950,13 +941,13 @@ object Scanners { val isEmptyCharLit = (ch == '\'') getLitChar() if ch == '\'' then - if isEmptyCharLit then error("empty character literal (use '\\'' for single quote)") - else if litBuf.length != 1 then error("illegal codepoint in Char constant: " + litBuf.toString.map(toUnicode).mkString("'", "", "'")) + if isEmptyCharLit then error(em"empty character literal (use '\\'' for single quote)") + else if litBuf.length != 1 then error(em"illegal codepoint in Char constant: ${litBuf.toString.map(toUnicode).mkString("'", "", "'")}") else finishCharLit() - else if isEmptyCharLit then error("empty character literal") - else error("unclosed character literal") + else if isEmptyCharLit then error(em"empty character literal") + else error(em"unclosed character literal") case _ => - error("unclosed character literal") + error(em"unclosed character literal") } } fetchSingleQuote() @@ -987,35 +978,34 @@ object Scanners { case SU => if (isAtEnd) token = EOF else { - error("illegal character") + error(em"illegal character") nextChar() } case _ => def fetchOther() = - if (ch == '\u21D2') { + if ch == '\u21D2' then nextChar(); token = ARROW - report.deprecationWarning("The unicode arrow `⇒` is deprecated, use `=>` instead. 
If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", sourcePos(offset)) - } - else if (ch == '\u2190') { + report.deprecationWarning(em"The unicode arrow `⇒` is deprecated, use `=>` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", sourcePos(offset)) + else if ch == '\u2190' then nextChar(); token = LARROW - report.deprecationWarning("The unicode arrow `←` is deprecated, use `<-` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", sourcePos(offset)) - } - else if (Character.isUnicodeIdentifierStart(ch)) { + report.deprecationWarning(em"The unicode arrow `←` is deprecated, use `<-` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", sourcePos(offset)) + else if isUnicodeIdentifierStart(ch) then putChar(ch) nextChar() getIdentRest() - } - else if (isSpecial(ch)) { + if ch == '"' && token == IDENTIFIER then token = INTERPOLATIONID + else if isSpecial(ch) then putChar(ch) nextChar() getOperatorRest() - } else if isSupplementary(ch, isUnicodeIdentifierStart) then getIdentRest() - else { - error(s"illegal character '${toUnicode(ch)}'") + if ch == '"' && token == IDENTIFIER then token = INTERPOLATIONID + else if isSupplementary(ch, isSpecial) then + getOperatorRest() + else + error(em"illegal character '${toUnicode(ch)}'") nextChar() - } fetchOther() } } @@ -1043,7 +1033,7 @@ object Scanners { if (ch == '/') nextChar() else skipComment() } - else if (ch == SU) incompleteInputError("unclosed comment") + else if (ch == SU) incompleteInputError(em"unclosed comment") else { nextChar(); skipComment() } def nestedComment() = { nextChar(); skipComment() } val start = lastCharOffset @@ -1091,6 +1081,7 @@ object Scanners { next class LookaheadScanner(val allowIndent: Boolean = false) 
extends Scanner(source, offset, allowIndent = allowIndent) { + override protected def initialCharBufferSize = 8 override def languageImportContext = Scanner.this.languageImportContext } @@ -1123,14 +1114,14 @@ object Scanners { nextChar() finishNamedToken(BACKQUOTED_IDENT, target = this) if (name.length == 0) - error("empty quoted identifier") + error(em"empty quoted identifier") else if (name == nme.WILDCARD) - error("wildcard invalid as backquoted identifier") + error(em"wildcard invalid as backquoted identifier") } - else error("unclosed quoted identifier") + else error(em"unclosed quoted identifier") } - private def getIdentRest(): Unit = (ch: @switch) match { + @tailrec private def getIdentRest(): Unit = (ch: @switch) match { case 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | @@ -1165,7 +1156,7 @@ object Scanners { finishNamed() } - private def getOperatorRest(): Unit = (ch: @switch) match { + @tailrec private def getOperatorRest(): Unit = (ch: @switch) match { case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | @@ -1176,23 +1167,13 @@ object Scanners { if nxch == '/' || nxch == '*' then finishNamed() else { putChar(ch); nextChar(); getOperatorRest() } case _ => - if (isSpecial(ch)) { putChar(ch); nextChar(); getOperatorRest() } + if isSpecial(ch) then { putChar(ch); nextChar(); getOperatorRest() } + else if isSupplementary(ch, isSpecial) then getOperatorRest() else finishNamed() } private def getIdentOrOperatorRest(): Unit = - if (isIdentifierPart(ch)) - getIdentRest() - else ch match { - case '~' | '!' | '@' | '#' | '%' | - '^' | '*' | '+' | '-' | '<' | - '>' | '?' 
| ':' | '=' | '&' | - '|' | '\\' | '/' => - getOperatorRest() - case _ => - if (isSpecial(ch)) getOperatorRest() - else finishNamed() - } + if (isIdentifierPart(ch) || isSupplementary(ch, isIdentifierPart)) getIdentRest() else getOperatorRest() def isSoftModifier: Boolean = token == IDENTIFIER @@ -1221,7 +1202,7 @@ object Scanners { nextChar() token = STRINGLIT } - else error("unclosed string literal") + else error(em"unclosed string literal") } private def getRawStringLit(): Unit = @@ -1235,7 +1216,7 @@ object Scanners { getRawStringLit() } else if (ch == SU) - incompleteInputError("unclosed multi-line string literal") + incompleteInputError(em"unclosed multi-line string literal") else { putChar(ch) nextRawChar() @@ -1305,7 +1286,7 @@ object Scanners { else if atSupplementary(ch, isUnicodeIdentifierStart) then getInterpolatedIdentRest(hasSupplement = true) else - error("invalid string interpolation: `$$`, `$\"`, `$`ident or `$`BlockExpr expected", off = charOffset - 2) + error("invalid string interpolation: `$$`, `$\"`, `$`ident or `$`BlockExpr expected".toMessage, off = charOffset - 2) putChar('$') getStringPart(multiLine) } @@ -1313,9 +1294,9 @@ object Scanners { val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF))) if (isUnclosedLiteral) if (multiLine) - incompleteInputError("unclosed multi-line string literal") + incompleteInputError(em"unclosed multi-line string literal") else - error("unclosed string literal") + error(em"unclosed string literal") else { putChar(ch) nextRawChar() @@ -1467,7 +1448,7 @@ object Scanners { } def checkNoLetter(): Unit = if (isIdentifierPart(ch) && ch >= ' ') - error("Invalid literal number") + error(em"Invalid literal number") /** Read a number into strVal and set base */ @@ -1515,7 +1496,7 @@ object Scanners { if (ch == '\'') finishCharLit() else { token = op - strVal = if (name != null) name.toString else null + strVal = Objects.toString(name) litBuf.clear() } } @@ -1550,7 +1531,7 @@ 
object Scanners { def resume(lastTokenData: TokenData): Unit = { this.copyFrom(lastTokenData) if (next.token != EMPTY && !ctx.reporter.hasErrors) - error("unexpected end of input: possible missing '}' in XML block") + error(em"unexpected end of input: possible missing '}' in XML block") nextToken() } diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index 7d27b3ca82b9..dba0ad3fa2ee 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -231,6 +231,8 @@ object Tokens extends TokensCommon { final val canStartInfixTypeTokens: TokenSet = literalTokens | identifierTokens | BitSet( THIS, SUPER, USCORE, LPAREN, LBRACE, AT) + final val canStartTypeTokens: TokenSet = canStartInfixTypeTokens | BitSet(LBRACE) + final val templateIntroTokens: TokenSet = BitSet(CLASS, TRAIT, OBJECT, ENUM, CASECLASS, CASEOBJECT) final val dclIntroTokens: TokenSet = BitSet(DEF, VAL, VAR, TYPE, GIVEN) @@ -287,7 +289,7 @@ object Tokens extends TokensCommon { final val closingParens = BitSet(RPAREN, RBRACKET, RBRACE) - final val softModifierNames = Set(nme.inline, nme.opaque, nme.open, nme.transparent, nme.infix) + final val softModifierNames = Set(nme.inline, nme.into, nme.opaque, nme.open, nme.transparent, nme.infix) def showTokenDetailed(token: Int): String = debugString(token) diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala index a1f9c8d73ad4..ee3ecda60aee 100644 --- a/compiler/src/dotty/tools/dotc/parsing/package.scala +++ b/compiler/src/dotty/tools/dotc/parsing/package.scala @@ -17,7 +17,7 @@ package object parsing { def precedence(operator: Name): Int = if (operator eq nme.ERROR) -1 else { - val firstCh = operator.firstPart.head + val firstCh = operator.firstCodePoint if (isScalaLetter(firstCh)) 1 else if (operator.isOpAssignmentName) 0 else firstCh match { diff --git 
a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index 591042961dbb..77c5a1bf376b 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -6,6 +6,7 @@ package xml import scala.language.unsafeNulls import scala.collection.mutable +import core.Contexts.Context import mutable.{ Buffer, ArrayBuffer, ListBuffer } import scala.util.control.ControlThrowable import util.Chars.SU @@ -13,6 +14,7 @@ import Parsers._ import util.Spans._ import core._ import Constants._ +import Decorators.{em, toMessage} import util.SourceFile import Utility._ @@ -49,7 +51,7 @@ object MarkupParsers { override def getMessage: String = "input ended while parsing XML" } - class MarkupParser(parser: Parser, final val preserveWS: Boolean)(implicit src: SourceFile) extends MarkupParserCommon { + class MarkupParser(parser: Parser, final val preserveWS: Boolean)(using Context) extends MarkupParserCommon { import Tokens.{ LBRACE, RBRACE } @@ -329,9 +331,9 @@ object MarkupParsers { case c @ TruncatedXMLControl => ifTruncated(c.getMessage) case c @ (MissingEndTagControl | ConfusedAboutBracesControl) => - parser.syntaxError(c.getMessage + debugLastElem + ">", debugLastPos) + parser.syntaxError(em"${c.getMessage}$debugLastElem>", debugLastPos) case _: ArrayIndexOutOfBoundsException => - parser.syntaxError("missing end tag in XML literal for <%s>" format debugLastElem, debugLastPos) + parser.syntaxError(em"missing end tag in XML literal for <$debugLastElem>", debugLastPos) } finally parser.in.resume(saved) @@ -379,7 +381,7 @@ object MarkupParsers { ts(0) } }, - msg => parser.incompleteInputError(msg) + msg => parser.incompleteInputError(msg.toMessage) ) /** @see xmlPattern. 
resynchronizes after successful parse @@ -395,7 +397,7 @@ object MarkupParsers { tree } }, - msg => parser.syntaxError(msg, curOffset) + msg => parser.syntaxError(msg.toMessage, curOffset) ) def escapeToScala[A](op: => A, kind: String): A = { @@ -421,7 +423,7 @@ object MarkupParsers { */ def xScalaPatterns: List[Tree] = escapeToScala(parser.patterns(), "pattern") - def reportSyntaxError(offset: Int, str: String): Unit = parser.syntaxError(str, offset) + def reportSyntaxError(offset: Int, str: String): Unit = parser.syntaxError(str.toMessage, offset) def reportSyntaxError(str: String): Unit = { reportSyntaxError(curOffset, "in XML literal: " + str) nextch() diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 3093a1c0460f..976b783c40f0 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -5,6 +5,7 @@ import scala.language.unsafeNulls import core._ import Contexts._ +import Decorators.em import config.{ PathResolver, Feature } import dotty.tools.io._ import Phases._ @@ -83,14 +84,14 @@ trait Plugins { // Verify required plugins are present. 
for (req <- ctx.settings.require.value ; if !(plugs exists (_.name == req))) - report.error("Missing required plugin: " + req) + report.error(em"Missing required plugin: $req") // Verify no non-existent plugin given with -P for { opt <- ctx.settings.pluginOptions.value if !(plugs exists (opt startsWith _.name + ":")) } - report.error("bad option: -P:" + opt) + report.error(em"bad option: -P:$opt") plugs } diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 348390d9c7e2..3f32b29654c9 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -58,9 +58,9 @@ object Formatting { def show(x: Seq[X]) = new CtxShow: def run(using Context) = x.map(show1) - given [A: Show, B: Show]: Show[(A, B)] with - def show(x: (A, B)) = new CtxShow: - def run(using Context) = (show1(x._1), show1(x._2)) + given [H: Show, T <: Tuple: Show]: Show[H *: T] with + def show(x: H *: T) = new CtxShow: + def run(using Context) = show1(x.head) *: Show[T].show(x.tail).ctxShow.asInstanceOf[Tuple] given [X: Show]: Show[X | Null] with def show(x: X | Null) = if x == null then "null" else Show[X].show(x.nn) @@ -71,6 +71,16 @@ object Formatting { given Show[TypeComparer.ApproxState] with def show(x: TypeComparer.ApproxState) = TypeComparer.ApproxState.Repr.show(x) + given Show[ast.TreeInfo.PurityLevel] with + def show(x: ast.TreeInfo.PurityLevel) = x match + case ast.TreeInfo.Path => "PurityLevel.Path" + case ast.TreeInfo.Pure => "PurityLevel.Pure" + case ast.TreeInfo.Idempotent => "PurityLevel.Idempotent" + case ast.TreeInfo.Impure => "PurityLevel.Impure" + case ast.TreeInfo.PurePath => "PurityLevel.PurePath" + case ast.TreeInfo.IdempotentPath => "PurityLevel.IdempotentPath" + case _ => s"PurityLevel(${x.x})" + given Show[Showable] = ShowAny given Show[Shown] = ShowAny given Show[Int] = ShowAny @@ -90,6 +100,7 @@ object Formatting { given 
Show[util.SourceFile] = ShowAny given Show[util.Spans.Span] = ShowAny given Show[tasty.TreeUnpickler#OwnerTree] = ShowAny + given Show[typer.ForceDegree.Value] = ShowAny private def show1[A: Show](x: A)(using Context) = show2(Show[A].show(x).ctxShow) private def show2(x: Shown)(using Context): String = x match @@ -137,236 +148,6 @@ object Formatting { } } - /** The `em` string interpolator works like the `i` string interpolator, but marks nonsensical errors - * using `...` tags. - * Note: Instead of these tags, it would be nicer to return a data structure containing the message string - * and a boolean indicating whether the message is sensical, but then we cannot use string operations - * like concatenation, stripMargin etc on the values returned by em"...", and in the current error - * message composition methods, this is crucial. - */ - def forErrorMessages(op: Context ?=> String)(using Context): String = op(using errorMessageCtx) - - private class ErrorMessagePrinter(_ctx: Context) extends RefinedPrinter(_ctx): - override def toText(tp: Type): Text = wrapNonSensical(tp, super.toText(tp)) - override def toText(sym: Symbol): Text = wrapNonSensical(sym, super.toText(sym)) - - private def wrapNonSensical(arg: Any, text: Text)(using Context): Text = { - import Message._ - def isSensical(arg: Any): Boolean = arg match { - case tpe: Type => - tpe.exists && !tpe.isErroneous - case sym: Symbol if sym.isCompleted => - sym.info match { - case _: ErrorType | TypeAlias(_: ErrorType) | NoType => false - case _ => true - } - case _ => true - } - - if (isSensical(arg)) text - else nonSensicalStartTag ~ text ~ nonSensicalEndTag - } - - private type Recorded = Symbol | ParamRef | SkolemType - - private case class SeenKey(str: String, isType: Boolean) - private class Seen extends mutable.HashMap[SeenKey, List[Recorded]] { - - override def default(key: SeenKey) = Nil - - def record(str: String, isType: Boolean, entry: Recorded)(using Context): String = { - - /** If `e1` is an 
alias of another class of the same name, return the other - * class symbol instead. This normalization avoids recording e.g. scala.List - * and scala.collection.immutable.List as two different types - */ - def followAlias(e1: Recorded): Recorded = e1 match { - case e1: Symbol if e1.isAliasType => - val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol - if (underlying.name == e1.name) underlying else e1 - case _ => e1 - } - val key = SeenKey(str, isType) - val existing = apply(key) - lazy val dealiased = followAlias(entry) - - // alts: The alternatives in `existing` that are equal, or follow (an alias of) `entry` - var alts = existing.dropWhile(alt => dealiased ne followAlias(alt)) - if (alts.isEmpty) { - alts = entry :: existing - update(key, alts) - } - val suffix = alts.length match { - case 1 => "" - case n => n.toString.toCharArray.map { - case '0' => '⁰' - case '1' => '¹' - case '2' => '²' - case '3' => '³' - case '4' => '⁴' - case '5' => '⁵' - case '6' => '⁶' - case '7' => '⁷' - case '8' => '⁸' - case '9' => '⁹' - }.mkString - } - str + suffix - } - } - - private class ExplainingPrinter(seen: Seen)(_ctx: Context) extends ErrorMessagePrinter(_ctx) { - - /** True if printer should a source module instead of its module class */ - private def useSourceModule(sym: Symbol): Boolean = - sym.is(ModuleClass, butNot = Package) && sym.sourceModule.exists && !_ctx.settings.YdebugNames.value - - override def simpleNameString(sym: Symbol): String = - if (useSourceModule(sym)) simpleNameString(sym.sourceModule) - else seen.record(super.simpleNameString(sym), sym.isType, sym) - - override def ParamRefNameString(param: ParamRef): String = - seen.record(super.ParamRefNameString(param), param.isInstanceOf[TypeParamRef], param) - - override def toTextRef(tp: SingletonType): Text = tp match { - case tp: SkolemType => seen.record(tp.repr.toString, isType = true, tp) - case _ => super.toTextRef(tp) - } - - override def toText(tp: Type): Text = tp match { 
- case tp: TypeRef if useSourceModule(tp.symbol) => Str("object ") ~ super.toText(tp) - case _ => super.toText(tp) - } - } - - /** Create explanation for single `Recorded` type or symbol */ - def explanation(entry: AnyRef)(using Context): String = { - def boundStr(bound: Type, default: ClassSymbol, cmp: String) = - if (bound.isRef(default)) "" else i"$cmp $bound" - - def boundsStr(bounds: TypeBounds): String = { - val lo = boundStr(bounds.lo, defn.NothingClass, ">:") - val hi = boundStr(bounds.hi, defn.AnyClass, "<:") - if (lo.isEmpty) hi - else if (hi.isEmpty) lo - else s"$lo and $hi" - } - - def addendum(cat: String, info: Type): String = info match { - case bounds @ TypeBounds(lo, hi) if bounds ne TypeBounds.empty => - if (lo eq hi) i" which is an alias of $lo" - else i" with $cat ${boundsStr(bounds)}" - case _ => - "" - } - - entry match { - case param: TypeParamRef => - s"is a type variable${addendum("constraint", TypeComparer.bounds(param))}" - case param: TermParamRef => - s"is a reference to a value parameter" - case sym: Symbol => - val info = - if (ctx.gadt.contains(sym)) - sym.info & ctx.gadt.fullBounds(sym) - else - sym.info - s"is a ${ctx.printer.kindString(sym)}${sym.showExtendedLocation}${addendum("bounds", info)}" - case tp: SkolemType => - s"is an unknown value of type ${tp.widen.show}" - } - } - - /** Turns a `Seen` into a `String` to produce an explanation for types on the - * form `where: T is...` - * - * @return string disambiguating types - */ - private def explanations(seen: Seen)(using Context): String = { - def needsExplanation(entry: Recorded) = entry match { - case param: TypeParamRef => ctx.typerState.constraint.contains(param) - case param: ParamRef => false - case skolem: SkolemType => true - case sym: Symbol => - ctx.gadt.contains(sym) && ctx.gadt.fullBounds(sym) != TypeBounds.empty - } - - val toExplain: List[(String, Recorded)] = seen.toList.flatMap { kvs => - val res: List[(String, Recorded)] = kvs match { - case (key, entry :: 
Nil) => - if (needsExplanation(entry)) (key.str, entry) :: Nil else Nil - case (key, entries) => - for (alt <- entries) yield { - val tickedString = seen.record(key.str, key.isType, alt) - (tickedString, alt) - } - } - res // help the inferrencer out - }.sortBy(_._1) - - def columnar(parts: List[(String, String)]): List[String] = { - lazy val maxLen = parts.map(_._1.length).max - parts.map { - case (leader, trailer) => - val variable = hl(leader) - s"""$variable${" " * (maxLen - leader.length)} $trailer""" - } - } - - val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry)) } - val explainLines = columnar(explainParts) - if (explainLines.isEmpty) "" else i"where: $explainLines%\n %\n" - } - - private def errorMessageCtx(using Context): Context = - val ctx1 = ctx.property(MessageLimiter) match - case Some(_: ErrorMessageLimiter) => ctx - case _ => ctx.fresh.setProperty(MessageLimiter, ErrorMessageLimiter()) - ctx1.printer match - case _: ErrorMessagePrinter => ctx1 - case _ => ctx1.fresh.setPrinterFn(ctx => ErrorMessagePrinter(ctx)) - - /** Context with correct printer set for explanations */ - private def explainCtx(seen: Seen)(using Context): Context = - val ectx = errorMessageCtx - ectx.printer match - case dp: ExplainingPrinter => - ectx // re-use outer printer and defer explanation to it - case _ => - ectx.fresh.setPrinterFn(ctx => new ExplainingPrinter(seen)(ctx)) - - /** Entrypoint for explanation string interpolator: - * - * ``` - * ex"disambiguate $tpe1 and $tpe2" - * ``` - */ - def explained(op: Context ?=> String)(using Context): String = { - val seen = new Seen - val msg = op(using explainCtx(seen)) - val addendum = explanations(seen) - if (addendum.isEmpty) msg else msg ++ "\n\n" ++ addendum - } - - /** When getting a type mismatch it is useful to disambiguate placeholders like: - * - * ``` - * found: List[Int] - * required: List[T] - * where: T is a type in the initializer of value s which is an alias of - * String - * ``` - * 
- * @return the `where` section as well as the printing context for the - * placeholders - `("T is a...", printCtx)` - */ - def disambiguateTypes(args: Type*)(using Context): (String, Context) = { - val seen = new Seen - val printCtx = explainCtx(seen) - args.foreach(_.show(using printCtx)) // showing each member will put it into `seen` - (explanations(seen), printCtx) - } - /** This method will produce a colored type diff from the given arguments. * The idea is to do this for known cases that are useful and then fall back * on regular syntax highlighting for the cases which are unhandled. @@ -378,16 +159,13 @@ object Formatting { * @return the (found, expected, changePercentage) with coloring to * highlight the difference */ - def typeDiff(found: Type, expected: Type)(using Context): (String, String) = { - val fnd = wrapNonSensical(found, found.toText(ctx.printer)).show - val exp = wrapNonSensical(expected, expected.toText(ctx.printer)).show - - DiffUtil.mkColoredTypeDiff(fnd, exp) match { - case _ if ctx.settings.color.value == "never" => (fnd, exp) - case (fnd, exp, change) if change < 0.5 => (fnd, exp) + def typeDiff(found: Type, expected: Type)(using Context): (String, String) = + val fnd = found.show + val exp = expected.show + DiffUtil.mkColoredTypeDiff(fnd, exp) match + case (fnd1, exp1, change) + if change < 0.5 && ctx.settings.color.value != "never" => (fnd1, exp1) case _ => (fnd, exp) - } - } /** Explicit syntax highlighting */ def hl(s: String)(using Context): String = diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index d62b7afef707..ee0062f77dcd 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -14,7 +14,7 @@ import Variances.varianceSign import util.SourcePosition import scala.util.control.NonFatal import scala.annotation.switch -import config.Config +import config.{Config, 
Feature} import cc.{CapturingType, EventuallyCapturingType, CaptureSet, isBoxed} class PlainPrinter(_ctx: Context) extends Printer { @@ -111,13 +111,19 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Closed = - (refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close + protected def toTextRefinement(rt: RefinedType): Text = + val keyword = rt.refinedInfo match { + case _: ExprType | _: MethodOrPoly => "def " + case _: TypeBounds => "type " + case _: TypeProxy => "val " + case _ => "" + } + (keyword ~ refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close - protected def argText(arg: Type): Text = homogenizeArg(arg) match { + protected def argText(arg: Type, isErased: Boolean = false): Text = keywordText("erased ").provided(isErased) ~ (homogenizeArg(arg) match { case arg: TypeBounds => "?" ~ toText(arg) case arg => toText(arg) - } + }) /** Pretty-print comma-separated type arguments for a constructor to be inserted among parentheses or brackets * (hence with `GlobalPrec` precedence). 
@@ -218,7 +224,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp: PreviousErrorType if ctx.settings.XprintTypes.value => "" // do not print previously reported error message because they may try to print this error type again recuresevely case tp: ErrorType => - s"" + s"" case tp: WildcardType => if (tp.optBounds.exists) "" else "" case NoType => @@ -229,7 +235,6 @@ class PlainPrinter(_ctx: Context) extends Printer { changePrec(GlobalPrec) { "(" ~ keywordText("using ").provided(tp.isContextualMethod) - ~ keywordText("erased ").provided(tp.isErasedMethod) ~ keywordText("implicit ").provided(tp.isImplicitMethod && !tp.isContextualMethod) ~ paramsText(tp) ~ ")" @@ -242,7 +247,7 @@ class PlainPrinter(_ctx: Context) extends Printer { else toText(CapturingType(ExprType(parent), refs)) case ExprType(restp) => changePrec(GlobalPrec) { - (if ctx.settings.Ycc.value then "-> " else "=> ") ~ toText(restp) + (if Feature.pureFunsEnabled then "-> " else "=> ") ~ toText(restp) } case tp: HKTypeLambda => changePrec(GlobalPrec) { @@ -258,8 +263,9 @@ class PlainPrinter(_ctx: Context) extends Printer { if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot then toText(tpe) else toTextLocal(tpe) ~ " " ~ toText(annot) case tp: TypeVar => + def toTextCaret(tp: Type) = if printDebug then toTextLocal(tp) ~ Str("^") else toText(tp) if (tp.isInstantiated) - toTextLocal(tp.instanceOpt) ~ (Str("^") provided printDebug) + toTextCaret(tp.instanceOpt) else { val constr = ctx.typerState.constraint val bounds = @@ -267,7 +273,7 @@ class PlainPrinter(_ctx: Context) extends Printer { withMode(Mode.Printing)(TypeComparer.fullBounds(tp.origin)) else TypeBounds.empty - if (bounds.isTypeAlias) toText(bounds.lo) ~ (Str("^") provided printDebug) + if (bounds.isTypeAlias) toTextCaret(bounds.lo) else if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" 
~ toText(bounds) ~ ")" else toText(tp.origin) } @@ -278,6 +284,8 @@ class PlainPrinter(_ctx: Context) extends Printer { case ex: Throwable => Str("...") } "LazyRef(" ~ refTxt ~ ")" + case Range(lo, hi) => + toText(lo) ~ ".." ~ toText(hi) case _ => tp.fallbackToText(this) } @@ -287,9 +295,10 @@ class PlainPrinter(_ctx: Context) extends Printer { "(" ~ toTextRef(tp) ~ " : " ~ toTextGlobal(tp.underlying) ~ ")" protected def paramsText(lam: LambdaType): Text = { - def paramText(name: Name, tp: Type) = - toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true) - Text(lam.paramNames.lazyZip(lam.paramInfos).map(paramText), ", ") + val erasedParams = lam.erasedParams + def paramText(name: Name, tp: Type, erased: Boolean) = + keywordText("erased ").provided(erased) ~ toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true) + Text(lam.paramNames.lazyZip(lam.paramInfos).lazyZip(erasedParams).map(paramText), ", ") } protected def ParamRefNameString(name: Name): String = nameString(name) @@ -376,6 +385,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toTextCaptureRef(tp: Type): Text = homogenize(tp) match + case tp: TermRef if tp.symbol == defn.captureRoot => Str("*") case tp: SingletonType => toTextRef(tp) case _ => toText(tp) @@ -606,7 +616,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toText(sc: Scope): Text = ("Scope{" ~ dclsText(sc.toList) ~ "}").close - def toText[T >: Untyped](tree: Tree[T]): Text = { + def toText[T <: Untyped](tree: Tree[T]): Text = { def toTextElem(elem: Any): Text = elem match { case elem: Showable => elem.toText(this) case elem: List[?] 
=> "List(" ~ Text(elem map toTextElem, ",") ~ ")" @@ -688,11 +698,18 @@ class PlainPrinter(_ctx: Context) extends Printer { Text(ups.map(toText), ", ") Text(deps, "\n") } + val depsText = if Config.showConstraintDeps then c.depsToString else "" //Printer.debugPrintUnique = false - Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText)) + Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText, depsText)) finally ctx.typerState.constraint = savedConstraint + def toText(g: GadtConstraint): Text = + val deps = for sym <- g.symbols yield + val bound = g.fullBounds(sym).nn + (typeText(toText(sym.typeRef)) ~ toText(bound)).close + ("GadtConstraint(" ~ Text(deps, ", ") ~ ")").close + def plain: PlainPrinter = this protected def keywordStr(text: String): String = coloredStr(text, SyntaxHighlighting.KeywordColor) diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index b883b6be805b..326630844dde 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -31,7 +31,7 @@ abstract class Printer { * ### `atPrec` vs `changePrec` * * This is to be used when changing precedence inside some sort of parentheses: - * for instance, to print T[A]` use + * for instance, to print `T[A]` use * `toText(T) ~ '[' ~ atPrec(GlobalPrec) { toText(A) } ~ ']'`. * * If the presence of the parentheses depends on precedence, inserting them manually is most certainly a bug. @@ -60,8 +60,7 @@ abstract class Printer { * A op B op' C parses as (A op B) op' C if op and op' are left-associative, and as * A op (B op' C) if they're right-associative, so we need respectively * ```scala - * val isType = ??? // is this a term or type operator? 
- * val prec = parsing.precedence(op, isType) + * val prec = parsing.precedence(op) * // either: * changePrec(prec) { toText(a) ~ op ~ atPrec(prec + 1) { toText(b) } } // for left-associative op and op' * // or: @@ -149,7 +148,7 @@ abstract class Printer { def toText(sc: Scope): Text /** Textual representation of tree */ - def toText[T >: Untyped](tree: Tree[T]): Text + def toText[T <: Untyped](tree: Tree[T]): Text /** Textual representation of source position */ def toText(pos: SourcePosition): Text @@ -163,6 +162,9 @@ abstract class Printer { /** Textual representation of a constraint */ def toText(c: OrderingConstraint): Text + /** Textual representation of a GADT constraint */ + def toText(c: GadtConstraint): Text + /** Render element within highest precedence */ def toTextLocal(elem: Showable): Text = atPrec(DotPrec) { elem.toText(this) } diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 619bfafeb775..014e5ddf0d66 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -3,6 +3,7 @@ package dotc package printing import core._ +import Constants.* import Texts._ import Types._ import Flags._ @@ -24,7 +25,7 @@ import NameKinds.{WildcardParamName, DefaultGetterName} import util.Chars.isOperatorPart import transform.TypeUtils._ import transform.SymUtils._ -import config.Config +import config.{Config, Feature} import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.ast.untpd.{MemberDef, Modifiers, PackageDef, RefTree, Template, TypeDef, ValOrDefDef} @@ -40,7 +41,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override def printerContext: Context = myCtx - def withEnclosingDef(enclDef: Tree[? 
>: Untyped])(op: => Text): Text = { + def withEnclosingDef(enclDef: Tree[?])(op: => Text): Text = { val savedCtx = myCtx if (enclDef.hasType && enclDef.symbol.exists) myCtx = ctx.withOwner(enclDef.symbol) @@ -58,6 +59,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { try op finally myCtx = savedCtx } + inline def inContextBracket(inline op: Text): Text = + val savedCtx = myCtx + try op finally myCtx = savedCtx + def withoutPos(op: => Text): Text = { val savedPrintPos = printPos printPos = false @@ -143,17 +148,16 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextTuple(args: List[Type]): Text = "(" ~ argsText(args) ~ ")" - def toTextFunction(args: List[Type], isGiven: Boolean, isErased: Boolean, isPure: Boolean): Text = + def toTextFunction(args: List[Type], isGiven: Boolean, isPure: Boolean): Text = changePrec(GlobalPrec) { val argStr: Text = if args.length == 2 && !defn.isTupleNType(args.head) - && !isGiven && !isErased + && !isGiven then atPrec(InfixPrec) { argText(args.head) } else "(" - ~ keywordText("erased ").provided(isErased) ~ argsText(args.init) ~ ")" argStr ~ " " ~ arrow(isGiven, isPure) ~ " " ~ argText(args.last) @@ -163,7 +167,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case info: MethodType => changePrec(GlobalPrec) { "(" - ~ keywordText("erased ").provided(info.isErasedMethod) ~ paramsText(info) ~ ") " ~ arrow(info.isImplicitMethod, isPure) @@ -219,9 +222,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => val tsym = tycon.typeSymbol if tycon.isRepeatedParam then toTextLocal(args.head) ~ "*" + else if tp.isConvertibleParam then "into " ~ toText(args.head) else if defn.isFunctionSymbol(tsym) then - toTextFunction(args, tsym.name.isContextFunction, tsym.name.isErasedFunction, - isPure = ctx.settings.Ycc.value && !tsym.name.isImpureFunction) + toTextFunction(args, tsym.name.isContextFunction, + isPure = Feature.pureFunsEnabled && 
!tsym.name.isImpureFunction) else if isInfixType(tp) then val l :: r :: Nil = args: @unchecked val opName = tyconName(tycon) @@ -248,7 +252,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toText(tycon) case tp: RefinedType if defn.isFunctionOrPolyType(tp) && !printDebug => toTextMethodAsFunction(tp.refinedInfo, - isPure = ctx.settings.Ycc.value && !tp.typeSymbol.name.isImpureFunction) + isPure = Feature.pureFunsEnabled && !tp.typeSymbol.name.isImpureFunction) case tp: TypeRef => if (tp.symbol.isAnonymousClass && !showUniqueIds) toText(tp.info) @@ -272,6 +276,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tp: LazyRef if !printDebug => try toText(tp.ref) catch case ex: Throwable => "..." + case AnySelectionProto => + "a type that can be selected or applied" case tp: SelectionProto => "?{ " ~ toText(tp.name) ~ (Str(" ") provided !tp.name.toSimpleName.last.isLetterOrDigit) ~ @@ -279,14 +285,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tp: ViewProto => toText(tp.argType) ~ " ?=>? 
" ~ toText(tp.resultType) case tp @ FunProto(args, resultType) => - val argsText = args match { - case dummyTreeOfType(tp) :: Nil if !(tp isRef defn.NullClass) => "null: " ~ toText(tp) - case _ => toTextGlobal(args, ", ") - } "[applied to (" ~ keywordText("using ").provided(tp.isContextualMethod) - ~ keywordText("erased ").provided(tp.isErasedMethod) - ~ argsText + ~ argsTreeText(args) ~ ") returning " ~ toText(resultType) ~ "]" @@ -302,13 +303,19 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def exprToText(tp: ExprType): Text = "=> " ~ toText(tp.resType) - protected def blockToText[T >: Untyped](block: Block[T]): Text = + protected def argsTreeText(args: List[untpd.Tree]): Text = args match + case dummyTreeOfType(tp) :: Nil if !tp.isRef(defn.NullClass) && !homogenizedView => toText(Constant(null)) ~ ": " ~ toText(tp) + case _ => toTextGlobal(args, ", ") + + protected def blockToText[T <: Untyped](block: Block[T]): Text = blockText(block.stats :+ block.expr) - protected def blockText[T >: Untyped](trees: List[Tree[T]]): Text = - ("{" ~ toText(trees, "\n") ~ "}").close + protected def blockText[T <: Untyped](trees: List[Tree[T]]): Text = + inContextBracket { + ("{" ~ toText(trees, "\n") ~ "}").close + } - protected def typeApplyText[T >: Untyped](tree: TypeApply[T]): Text = { + protected def typeApplyText[T <: Untyped](tree: TypeApply[T]): Text = { val funText = toTextLocal(tree.fun) tree.fun match { case Select(New(tpt), nme.CONSTRUCTOR) if tpt.typeOpt.dealias.isInstanceOf[AppliedType] => @@ -318,7 +325,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - protected def toTextCore[T >: Untyped](tree: Tree[T]): Text = { + protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = { import untpd._ def isLocalThis(tree: Tree) = tree.typeOpt match { @@ -433,7 +440,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toTextLocal(fun) ~ "(" ~ Str("using ").provided(app.applyKind == ApplyKind.Using 
&& !homogenizedView) - ~ toTextGlobal(args, ", ") + ~ argsTreeText(args) ~ ")" case tree: TypeApply => typeApplyText(tree) @@ -515,9 +522,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case SeqLiteral(elems, elemtpt) => "[" ~ toTextGlobal(elems, ",") ~ " : " ~ toText(elemtpt) ~ "]" case tree @ Inlined(call, bindings, body) => - (("/* inlined from " ~ (if (call.isEmpty) "outside" else toText(call)) ~ " */ ") `provided` - !homogenizedView && ctx.settings.XprintInline.value) ~ - (if bindings.isEmpty then toText(body) else blockText(bindings :+ body)) + val bodyText = if bindings.isEmpty then toText(body) else blockText(bindings :+ body) + if homogenizedView || !ctx.settings.XprintInline.value then bodyText + else if call.isEmpty then stringText("{{") ~ stringText("/* inlined from outside */") ~ bodyText ~ stringText("}}") + else keywordText("{{") ~ keywordText("/* inlined from ") ~ toText(call) ~ keywordText(" */") ~ bodyText ~ keywordText("}}") case tpt: untpd.DerivedTypeTree => "" case TypeTree() => @@ -554,13 +562,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { (" <: " ~ toText(bound) provided !bound.isEmpty) } case ByNameTypeTree(tpt) => - (if ctx.settings.Ycc.value then "-> " else "=> ") + (if Feature.pureFunsEnabled then "-> " else "=> ") ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi, alias) => if (lo eq hi) && alias.isEmpty then optText(lo)(" = " ~ _) else optText(lo)(" >: " ~ _) ~ optText(hi)(" <: " ~ _) ~ optText(alias)(" = " ~ _) case bind @ Bind(name, body) => - keywordText("given ").provided(tree.symbol.isOneOf(GivenOrImplicit) && !homogenizedView) ~ // Used for scala.quoted.Type in quote patterns (not pickled) + toTextOwner(bind) ~ keywordText("given ").provided(tree.symbol.isOneOf(GivenOrImplicit) && !homogenizedView) ~ // Used for scala.quoted.Type in quote patterns (not pickled) changePrec(InfixPrec) { nameIdText(bind) ~ " @ " ~ toText(body) } case Alternative(trees) => changePrec(OrPrec) { toText(trees, " 
| ") } @@ -596,7 +604,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { typeDefText(tparamsTxt, optText(rhs)(" = " ~ _)) } recur(rhs, "", true) - case Import(expr, selectors) => + case tree @ Import(expr, selectors) => + myCtx = myCtx.importContext(tree, tree.symbol) keywordText("import ") ~ importText(expr, selectors) case Export(expr, selectors) => keywordText("export ") ~ importText(expr, selectors) @@ -616,7 +625,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { try changePrec(GlobalPrec)(toText(captureSet) ~ " " ~ toText(arg)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner == defn.RetainsAnnot - && ctx.settings.Ycc.value && Config.printCaptureSetsAsPrefix && !printDebug + && Feature.ccEnabled && Config.printCaptureSetsAsPrefix && !printDebug then toTextRetainsAnnot else toTextAnnot case EmptyTree => @@ -638,31 +647,33 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case str: Literal => strText(str) } toText(id) ~ "\"" ~ Text(segments map segmentText, "") ~ "\"" - case Function(args, body) => + case fn @ Function(args, body) => var implicitSeen: Boolean = false var isGiven: Boolean = false - var isErased: Boolean = false - def argToText(arg: Tree) = arg match { + val erasedParams = fn match { + case fn: FunctionWithMods => fn.erasedParams + case _ => fn.args.map(_ => false) + } + def argToText(arg: Tree, isErased: Boolean) = arg match { case arg @ ValDef(name, tpt, _) => val implicitText = if ((arg.mods.is(Given))) { isGiven = true; "" } - else if ((arg.mods.is(Erased))) { isErased = true; "" } else if ((arg.mods.is(Implicit)) && !implicitSeen) { implicitSeen = true; keywordStr("implicit ") } else "" - implicitText ~ toText(name) ~ optAscription(tpt) + val erasedText = if isErased then keywordStr("erased ") else "" + implicitText ~ erasedText ~ toText(name) ~ optAscription(tpt) case _ => toText(arg) } val argsText = args match { - case (arg @ ValDef(_, tpt, _)) :: Nil if 
tpt.isEmpty => argToText(arg) + case (arg @ ValDef(_, tpt, _)) :: Nil if tpt.isEmpty => argToText(arg, erasedParams(0)) case _ => "(" - ~ keywordText("erased ").provided(isErased) - ~ Text(args.map(argToText), ", ") + ~ Text(args.zip(erasedParams).map(argToText), ", ") ~ ")" } val isPure = - ctx.settings.Ycc.value + Feature.pureFunsEnabled && tree.match case tree: FunctionWithMods => !tree.mods.is(Impure) case _ => true @@ -730,7 +741,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - override def toText[T >: Untyped](tree: Tree[T]): Text = controlled { + override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { import untpd._ var txt = toTextCore(tree) @@ -817,7 +828,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def dropAnnotForModText(sym: Symbol): Boolean = sym == defn.BodyAnnot - protected def optAscription[T >: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) + protected def optAscription[T <: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) private def idText(tree: untpd.Tree): Text = (if showUniqueIds && tree.hasType && tree.symbol.exists then s"#${tree.symbol.id}" else "") ~ @@ -833,7 +844,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def useSymbol(tree: untpd.Tree) = tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value - protected def nameIdText[T >: Untyped](tree: NameTree[T]): Text = + protected def nameIdText[T <: Untyped](tree: NameTree[T]): Text = if (tree.hasType && tree.symbol.exists) { val str = nameString(tree.symbol) tree match { @@ -847,26 +858,25 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def toTextOwner(tree: Tree[?]) = "[owner = " ~ tree.symbol.maybeOwner.show ~ "]" provided ctx.settings.YprintDebugOwners.value - protected def dclTextOr[T >: Untyped](tree: Tree[T])(treeText: => Text): Text = + protected def dclTextOr[T <: Untyped](tree: Tree[T])(treeText: => Text): Text = 
toTextOwner(tree) ~ { if (useSymbol(tree)) annotsText(tree.symbol) ~~ dclText(tree.symbol) else treeText } - def paramsText[T>: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match + def paramsText[T <: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match case Nil => "()" case untpd.ValDefs(vparams @ (vparam :: _)) => "(" ~ keywordText("using ").provided(vparam.mods.is(Given)) - ~ keywordText("erased ").provided(vparam.mods.is(Erased)) ~ toText(vparams, ", ") ~ ")" case untpd.TypeDefs(tparams) => "[" ~ toText(tparams, ", ") ~ "]" - def addParamssText[T >: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = + def addParamssText[T <: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = paramss.foldLeft(leading)((txt, params) => txt ~ paramsText(params)) - protected def valDefToText[T >: Untyped](tree: ValDef[T]): Text = { + protected def valDefToText[T <: Untyped](tree: ValDef[T]): Text = { dclTextOr(tree) { modText(tree.mods, tree.symbol, keywordStr(if (tree.mods.is(Mutable)) "var" else "val"), isType = false) ~~ valDefText(nameIdText(tree)) ~ optAscription(tree.tpt) ~ @@ -874,7 +884,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - protected def defDefToText[T >: Untyped](tree: DefDef[T]): Text = { + protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = { import untpd._ dclTextOr(tree) { val defKeyword = modText(tree.mods, tree.symbol, keywordStr("def"), isType = false) @@ -884,30 +894,31 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if isExtension then val paramss = if tree.name.isRightAssocOperatorName then + // If you change the names of the clauses below, also change them in right-associative-extension-methods.md // we have the following encoding of tree.paramss: - // (leadingTyParamss ++ leadingUsing - // ++ rightTyParamss ++ rightParamss - // ++ leftParamss ++ trailingUsing ++ rest) + // (leftTyParams ++ leadingUsing + // ++ rightTyParams ++ 
rightParam + // ++ leftParam ++ trailingUsing ++ rest) // e.g. // extension [A](using B)(c: C)(using D) // def %:[E](f: F)(g: G)(using H): Res = ??? // will have the following values: - // - leadingTyParamss = List(`[A]`) + // - leftTyParams = List(`[A]`) // - leadingUsing = List(`(using B)`) - // - rightTyParamss = List(`[E]`) - // - rightParamss = List(`(f: F)`) - // - leftParamss = List(`(c: C)`) + // - rightTyParams = List(`[E]`) + // - rightParam = List(`(f: F)`) + // - leftParam = List(`(c: C)`) // - trailingUsing = List(`(using D)`) // - rest = List(`(g: G)`, `(using H)`) - // we need to swap (rightTyParams ++ rightParamss) with (leftParamss ++ trailingUsing) - val (leadingTyParamss, rest1) = tree.paramss.span(isTypeParamClause) + // we need to swap (rightTyParams ++ rightParam) with (leftParam ++ trailingUsing) + val (leftTyParams, rest1) = tree.paramss.span(isTypeParamClause) val (leadingUsing, rest2) = rest1.span(isUsingClause) - val (rightTyParamss, rest3) = rest2.span(isTypeParamClause) - val (rightParamss, rest4) = rest3.splitAt(1) - val (leftParamss, rest5) = rest4.splitAt(1) + val (rightTyParams, rest3) = rest2.span(isTypeParamClause) + val (rightParam, rest4) = rest3.splitAt(1) + val (leftParam, rest5) = rest4.splitAt(1) val (trailingUsing, rest6) = rest5.span(isUsingClause) - if leftParamss.nonEmpty then - leadingTyParamss ::: leadingUsing ::: leftParamss ::: trailingUsing ::: rightTyParamss ::: rightParamss ::: rest6 + if leftParam.nonEmpty then + leftTyParams ::: leadingUsing ::: leftParam ::: trailingUsing ::: rightTyParams ::: rightParam ::: rest6 else tree.paramss // it wasn't a binary operator, after all. 
else @@ -963,7 +974,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } else impl.body - val bodyText = " {" ~~ selfText ~ toTextGlobal(primaryConstrs ::: body, "\n") ~ "}" + val bodyText = inContextBracket( + " {" ~~ selfText ~ toTextGlobal(primaryConstrs ::: body, "\n") ~ "}") prefix ~ keywordText(" extends").provided(!ofNew && impl.parents.nonEmpty) ~~ parentsText ~ @@ -979,14 +991,14 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { ) } - protected def toTextPackageId[T >: Untyped](pid: Tree[T]): Text = - if (homogenizedView && pid.hasType) toTextLocal(pid.tpe.asInstanceOf[Showable]) + protected def toTextPackageId[T <: Untyped](pid: Tree[T]): Text = + if (homogenizedView && pid.hasType) toTextLocal(pid.typeOpt) else toTextLocal(pid) protected def packageDefText(tree: PackageDef): Text = { val statsText = tree.stats match { case (pdef: PackageDef) :: Nil => toText(pdef) - case _ => toTextGlobal(tree.stats, "\n") + case _ => inContextBracket(toTextGlobal(tree.stats, "\n")) } val bodyText = if (currentPrecedence == TopLevelPrec) "\n" ~ statsText else " {" ~ statsText ~ "}" @@ -1018,7 +1030,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else PrintableFlags(isType) if (homogenizedView && mods.flags.isTypeFlags) flagMask &~= GivenOrImplicit // drop implicit/given from classes val rawFlags = if (sym.exists) sym.flagsUNSAFE else mods.flags - if (rawFlags.is(Param)) flagMask = flagMask &~ Given &~ Erased + if (rawFlags.is(Param)) flagMask = flagMask &~ Given val flags = rawFlags & flagMask var flagsText = toTextFlags(sym, flags) val annotTexts = @@ -1034,10 +1046,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def optText(name: Name)(encl: Text => Text): Text = if (name.isEmpty) "" else encl(toText(name)) - def optText[T >: Untyped](tree: Tree[T])(encl: Text => Text): Text = + def optText[T <: Untyped](tree: Tree[T])(encl: Text => Text): Text = if (tree.isEmpty) "" else 
encl(toText(tree)) - def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = + def optText[T <: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else "" override protected def treatAsTypeParam(sym: Symbol): Boolean = sym.is(TypeParam) @@ -1050,7 +1062,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (sym.isImport) sym.infoOrCompleter match { case info: Namer#Completer => return info.original.show - case info: ImportType => return s"import $info.expr.show" + case info: ImportType => return s"import ${info.expr.show}" case _ => } def name = diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index 17f86e766869..7c040a78de5e 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ -1,8 +1,12 @@ package dotty.tools.dotc package printing +import scala.annotation.internal.sharable object Texts { + @sharable + private val ansi = java.util.regex.Pattern.compile("\u001b\\[\\d+m").nn + sealed abstract class Text { protected def indentMargin: Int = 2 @@ -15,12 +19,17 @@ object Texts { case Vertical(relems) => relems.isEmpty } + // Str Ver Clo Flu + // isVertical F T F F + // isClosed F T T F + // isFluid F F T T + // isSplittable F F F T def isVertical: Boolean = isInstanceOf[Vertical] def isClosed: Boolean = isVertical || isInstanceOf[Closed] def isFluid: Boolean = isInstanceOf[Fluid] def isSplittable: Boolean = isFluid && !isClosed - def close: Closed = new Closed(relems) + def close: Text = if isSplittable then Closed(relems) else this def remaining(width: Int): Int = this match { case Str(s, _) => @@ -53,7 +62,7 @@ object Texts { } private def appendIndented(that: Text)(width: Int): Text = - Vertical(that.layout(width - indentMargin).indented :: this.relems) + Fluid(that.layout(width - indentMargin).indented :: this.relems) private def 
append(width: Int)(that: Text): Text = if (this.isEmpty) that.layout(width) @@ -65,7 +74,7 @@ object Texts { else appendIndented(that)(width) private def lengthWithoutAnsi(str: String): Int = - str.replaceAll("\u001b\\[\\d+m", "").nn.length + ansi.matcher(str).nn.replaceAll("").nn.length def layout(width: Int): Text = this match { case Str(s, _) => @@ -113,7 +122,7 @@ object Texts { sb.append("|") } } - sb.append(s) + sb.append(s.replaceAll("[ ]+$", "")) case _ => var follow = false for (elem <- relems.reverse) { @@ -138,7 +147,13 @@ object Texts { def ~ (that: Text): Text = if (this.isEmpty) that else if (that.isEmpty) this - else Fluid(that :: this :: Nil) + else this match + case Fluid(relems1) if !isClosed => that match + case Fluid(relems2) if !that.isClosed => Fluid(relems2 ++ relems1) + case _ => Fluid(that +: relems1) + case _ => that match + case Fluid(relems2) if !that.isClosed => Fluid(relems2 :+ this) + case _ => Fluid(that :: this :: Nil) def ~~ (that: Text): Text = if (this.isEmpty) that @@ -161,9 +176,9 @@ object Texts { def apply(xs: Traversable[Text], sep: String = " "): Text = if (sep == "\n") lines(xs) else { - val ys = xs filterNot (_.isEmpty) + val ys = xs.filterNot(_.isEmpty) if (ys.isEmpty) Str("") - else ys reduce (_ ~ sep ~ _) + else ys.reduceRight((a, b) => (a ~ sep).close ~ b) } /** The given texts `xs`, each on a separate line */ @@ -176,12 +191,16 @@ object Texts { case class Str(s: String, lineRange: LineRange = EmptyLineRange) extends Text { override def relems: List[Text] = List(this) + override def toString = this match + case Str(s, EmptyLineRange) => s"Str($s)" + case Str(s, lineRange) => s"Str($s, $lineRange)" } case class Vertical(relems: List[Text]) extends Text case class Fluid(relems: List[Text]) extends Text - class Closed(relems: List[Text]) extends Fluid(relems) + class Closed(relems: List[Text]) extends Fluid(relems): + override def productPrefix = "Closed" implicit def stringToText(s: String): Text = Str(s) diff --git 
a/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java b/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java index 68ae4f148cfd..60f44db16add 100644 --- a/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java +++ b/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java @@ -248,13 +248,14 @@ public SunThreadMxBean(ThreadMXBean underlying) { super(underlying); this.real = underlying; try { - getThreadUserTimeMethod = real.getClass().getMethod("getThreadUserTime", long[].class); - isThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("isThreadAllocatedMemoryEnabled"); - setThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); - getThreadAllocatedBytesMethod1 = real.getClass().getMethod("getThreadAllocatedBytes", Long.TYPE); - getThreadAllocatedBytesMethod2 = real.getClass().getMethod("getThreadAllocatedBytes", long[].class); - isThreadAllocatedMemorySupportedMethod = real.getClass().getMethod("isThreadAllocatedMemorySupported"); - getThreadCpuTimeMethod = real.getClass().getMethod("getThreadCpuTime", long[].class); + Class cls = Class.forName("com.sun.management.ThreadMXBean"); + getThreadUserTimeMethod = cls.getMethod("getThreadUserTime", long[].class); + isThreadAllocatedMemoryEnabledMethod = cls.getMethod("isThreadAllocatedMemoryEnabled"); + setThreadAllocatedMemoryEnabledMethod = cls.getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); + getThreadAllocatedBytesMethod1 = cls.getMethod("getThreadAllocatedBytes", Long.TYPE); + getThreadAllocatedBytesMethod2 = cls.getMethod("getThreadAllocatedBytes", long[].class); + isThreadAllocatedMemorySupportedMethod = cls.getMethod("isThreadAllocatedMemorySupported"); + getThreadCpuTimeMethod = cls.getMethod("getThreadCpuTime", long[].class); getThreadUserTimeMethod.setAccessible(true); isThreadAllocatedMemoryEnabledMethod.setAccessible(true); diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala 
b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index 0283fb904476..64cc08160701 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -13,6 +13,7 @@ import javax.management.{Notification, NotificationEmitter, NotificationListener import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.Contexts._ import dotty.tools.io.AbstractFile +import annotation.internal.sharable object Profiler { def apply()(using Context): Profiler = @@ -103,6 +104,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) private val mainThread = Thread.currentThread() + @nowarn("cat=deprecation") private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { import RealProfiler._ val current = Thread.currentThread() @@ -216,14 +218,16 @@ sealed trait ProfileReporter { } object ConsoleProfileReporter extends ProfileReporter { - + @sharable var totalAlloc = 0L override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? + reportCommon(EventType.BACKGROUND, profiler, threadRange) override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? 
+ reportCommon(EventType.MAIN, profiler, threadRange) + @nowarn("cat=deprecation") + private def reportCommon(tpe:EventType, profiler: RealProfiler, threadRange: ProfileRange): Unit = + totalAlloc += threadRange.allocatedBytes + println(s"${threadRange.phase.phaseName.replace(',', ' ')},run ns = ${threadRange.runNs},idle ns = ${threadRange.idleNs},cpu ns = ${threadRange.cpuNs},user ns = ${threadRange.userNs},allocated = ${threadRange.allocatedBytes},heap at end = ${threadRange.end.heapBytes}, total allocated = $totalAlloc ") override def close(profiler: RealProfiler): Unit = () @@ -245,6 +249,7 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { reportCommon(EventType.BACKGROUND, profiler, threadRange) override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = reportCommon(EventType.MAIN, profiler, threadRange) + @nowarn("cat=deprecation") private def reportCommon(tpe:EventType, profiler: RealProfiler, threadRange: ProfileRange): Unit = out.println(s"$tpe,${threadRange.start.snapTimeNanos},${threadRange.end.snapTimeNanos},${profiler.id},${threadRange.phase.id},${threadRange.phase.phaseName.replace(',', ' ')},${threadRange.purpose},${threadRange.taskCount},${threadRange.thread.getId},${threadRange.thread.getName},${threadRange.runNs},${threadRange.idleNs},${threadRange.cpuNs},${threadRange.userNs},${threadRange.allocatedBytes},${threadRange.end.heapBytes} ") diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala new file mode 100644 index 000000000000..a98284f4078d --- /dev/null +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -0,0 +1,369 @@ +package dotty.tools.dotc +package quoted + +import scala.language.unsafeNulls + +import scala.collection.mutable +import scala.reflect.ClassTag + +import java.io.{PrintWriter, StringWriter} +import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} + +import 
dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.TreeMapWithImplicits +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Denotations.staticRef +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.NameKinds.FlatName +import dotty.tools.dotc.core.Names._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.TypeErasure +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.staging.QuoteContext.* +import dotty.tools.dotc.typer.ImportInfo.withRootImports +import dotty.tools.dotc.util.SrcPos +import dotty.tools.dotc.reporting.Message +import dotty.tools.repl.AbstractFileClassLoader +import dotty.tools.dotc.core.CyclicReference + +/** Tree interpreter for metaprogramming constructs */ +class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): + import Interpreter._ + import tpd._ + + val classLoader = + if ctx.owner.topLevelClass.name.startsWith(str.REPL_SESSION_LINE) then + new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader0) + else classLoader0 + + /** Local variable environment */ + type Env = Map[Symbol, Object] + def emptyEnv: Env = Map.empty + inline def env(using e: Env): e.type = e + + /** Returns the result of interpreting the code in the tree. + * Return Some of the result or None if the result type is not consistent with the expected type. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. + */ + final def interpret[T](tree: Tree)(using ct: ClassTag[T]): Option[T] = + interpretTree(tree)(using emptyEnv) match { + case obj: T => Some(obj) + case obj => + // TODO upgrade to a full type tag check or something similar + report.error(em"Interpreted tree returned a result of an unexpected type. 
Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) + None + } + + /** Returns the result of interpreting the code in the tree. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. + */ + protected def interpretTree(tree: Tree)(using Env): Object = tree match { + case Literal(Constant(value)) => + interpretLiteral(value) + + case tree: Ident if tree.symbol.is(Inline, butNot = Method) => + tree.tpe.widenTermRefExpr match + case ConstantType(c) => c.value.asInstanceOf[Object] + case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos) + + // TODO disallow interpreted method calls as arguments + case Call(fn, args) => + if (fn.symbol.isConstructor) + interpretNew(fn.symbol, args.flatten.map(interpretTree)) + else if (fn.symbol.is(Module)) + interpretModuleAccess(fn.symbol) + else if (fn.symbol.is(Method) && fn.symbol.isStatic) { + interpretedStaticMethodCall(fn.symbol.owner, fn.symbol, interpretArgs(args, fn.symbol.info)) + } + else if fn.symbol.isStatic then + assert(args.isEmpty) + interpretedStaticFieldAccess(fn.symbol) + else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) + if (fn.name == nme.asInstanceOfPM) + interpretModuleAccess(fn.qualifier.symbol) + else { + interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol, interpretArgs(args, fn.symbol.info)) + } + else if (env.contains(fn.symbol)) + env(fn.symbol) + else if (tree.symbol.is(InlineProxy)) + interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) + else + unexpectedTree(tree) + + case closureDef((ddef @ DefDef(_, ValDefs(arg :: Nil) :: Nil, _, _))) => + (obj: AnyRef) => interpretTree(ddef.rhs)(using env.updated(arg.symbol, obj)) + + // Interpret `foo(j = x, i = y)` which it is expanded to + // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` + case Block(stats, expr) => interpretBlock(stats, expr) + case NamedArg(_, arg) => interpretTree(arg) + + case Inlined(_, 
bindings, expansion) => interpretBlock(bindings, expansion) + + case Typed(expr, _) => + interpretTree(expr) + + case SeqLiteral(elems, _) => + interpretVarargs(elems.map(e => interpretTree(e))) + + case _ => + unexpectedTree(tree) + } + + private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = { + def interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = + assert(args.size == argTypes.size) + val view = + for (arg, info) <- args.lazyZip(argTypes) yield + info match + case _: ExprType => () => interpretTree(arg) // by-name argument + case _ => interpretTree(arg) // by-value argument + view.toList + + fnType.dealias match + case fnType: MethodType if fnType.hasErasedParams => interpretArgs(argss, fnType.resType) + case fnType: MethodType => + val argTypes = fnType.paramInfos + assert(argss.head.size == argTypes.size) + interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) + case fnType: AppliedType if defn.isContextFunctionType(fnType) => + val argTypes :+ resType = fnType.args: @unchecked + interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) + case fnType: PolyType => interpretArgs(argss, fnType.resType) + case fnType: ExprType => interpretArgs(argss, fnType.resType) + case _ => + assert(argss.isEmpty) + Nil + } + + private def interpretBlock(stats: List[Tree], expr: Tree)(using Env) = { + var unexpected: Option[Object] = None + val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match + case stat: ValDef => + accEnv.updated(stat.symbol, interpretTree(stat.rhs)(using accEnv)) + case stat => + if (unexpected.isEmpty) + unexpected = Some(unexpectedTree(stat)) + accEnv + ) + unexpected.getOrElse(interpretTree(expr)(using newEnv)) + } + + private def interpretLiteral(value: Any): Object = + value.asInstanceOf[Object] + + private def interpretVarargs(args: List[Object]): Object = + args.toSeq + + private def interpretedStaticMethodCall(moduleClass: 
Symbol, fn: Symbol, args: List[Object]): Object = { + val inst = + try loadModule(moduleClass) + catch + case MissingClassDefinedInCurrentRun(sym) => + suspendOnMissing(sym, pos) + val clazz = inst.getClass + val name = fn.name.asTermName + val method = getMethod(clazz, name, paramsSig(fn)) + stopIfRuntimeException(method.invoke(inst, args: _*), method) + } + + private def interpretedStaticFieldAccess(sym: Symbol): Object = { + val clazz = loadClass(sym.owner.fullName.toString) + val field = clazz.getField(sym.name.toString) + field.get(null) + } + + private def interpretModuleAccess(fn: Symbol): Object = + loadModule(fn.moduleClass) + + private def interpretNew(fn: Symbol, args: List[Object]): Object = { + val className = fn.owner.fullName.mangledString.replaceAll("\\$\\.", "\\$") + val clazz = loadClass(className) + val constr = clazz.getConstructor(paramsSig(fn): _*) + constr.newInstance(args: _*).asInstanceOf[Object] + } + + private def unexpectedTree(tree: Tree): Object = + throw new StopInterpretation(em"Unexpected tree could not be interpreted: ${tree.toString}", tree.srcPos) + + private def loadModule(sym: Symbol): Object = + if (sym.owner.is(Package)) { + // is top level object + val moduleClass = loadClass(sym.fullName.toString) + moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) + } + else { + // nested object in an object + val clazz = loadClass(sym.binaryClassName) + clazz.getConstructor().newInstance().asInstanceOf[Object] + } + + private def loadReplLineClass(moduleClass: Symbol): Class[?] = { + val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) + lineClassloader.loadClass(moduleClass.name.firstPart.toString) + } + + private def loadClass(name: String): Class[?] 
= + try classLoader.loadClass(name) + catch + case MissingClassDefinedInCurrentRun(sym) => + suspendOnMissing(sym, pos) + + + private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = + try clazz.getMethod(name.toString, paramClasses: _*) + catch { + case _: NoSuchMethodException => + val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" + throw new StopInterpretation(msg, pos) + case MissingClassDefinedInCurrentRun(sym) => + suspendOnMissing(sym, pos) + } + + private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = + try thunk + catch { + case ex: RuntimeException => + val sw = new StringWriter() + sw.write("A runtime exception occurred while executing macro expansion\n") + sw.write(ex.getMessage) + sw.write("\n") + ex.printStackTrace(new PrintWriter(sw)) + sw.write("\n") + throw new StopInterpretation(sw.toString.toMessage, pos) + case ex: InvocationTargetException => + ex.getTargetException match { + case ex: scala.quoted.runtime.StopMacroExpansion => + throw ex + case MissingClassDefinedInCurrentRun(sym) => + suspendOnMissing(sym, pos) + case targetException => + val sw = new StringWriter() + sw.write("Exception occurred while executing macro expansion.\n") + if (!ctx.settings.Ydebug.value) { + val end = targetException.getStackTrace.lastIndexWhere { x => + x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName + } + val shortStackTrace = targetException.getStackTrace.take(end + 1) + targetException.setStackTrace(shortStackTrace) + targetException.printStackTrace(new PrintWriter(sw)) + + targetException match + case _: CyclicReference => sw.write("\nSee full stack trace using -Ydebug") + case _ => + } else { + targetException.printStackTrace(new PrintWriter(sw)) + } + sw.write("\n") + throw new StopInterpretation(sw.toString.toMessage, pos) + } + } + + /** List of classes of the parameters of the signature of `sym` */ 
+ private def paramsSig(sym: Symbol): List[Class[?]] = { + def paramClass(param: Type): Class[?] = { + def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { + case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) + case _ => (tpe, depth) + } + def javaArraySig(tpe: Type): String = { + val (elemType, depth) = arrayDepth(tpe, 0) + val sym = elemType.classSymbol + val suffix = + if (sym == defn.BooleanClass) "Z" + else if (sym == defn.ByteClass) "B" + else if (sym == defn.ShortClass) "S" + else if (sym == defn.IntClass) "I" + else if (sym == defn.LongClass) "J" + else if (sym == defn.FloatClass) "F" + else if (sym == defn.DoubleClass) "D" + else if (sym == defn.CharClass) "C" + else "L" + javaSig(elemType) + ";" + ("[" * depth) + suffix + } + def javaSig(tpe: Type): String = tpe match { + case tpe: JavaArrayType => javaArraySig(tpe) + case _ => + // Take the flatten name of the class and the full package name + val pack = tpe.classSymbol.topLevelClass.owner + val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." 
+ packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString + } + + val sym = param.classSymbol + if (sym == defn.BooleanClass) classOf[Boolean] + else if (sym == defn.ByteClass) classOf[Byte] + else if (sym == defn.CharClass) classOf[Char] + else if (sym == defn.ShortClass) classOf[Short] + else if (sym == defn.IntClass) classOf[Int] + else if (sym == defn.LongClass) classOf[Long] + else if (sym == defn.FloatClass) classOf[Float] + else if (sym == defn.DoubleClass) classOf[Double] + else java.lang.Class.forName(javaSig(param), false, classLoader) + } + def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { + case tp: AppliedType if defn.isContextFunctionType(tp) => + // Call context function type direct method + tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) + case _ => Nil + } + val extraParams = getExtraParams(sym.info.finalResultType) + val allParams = TypeErasure.erasure(sym.info) match { + case meth: MethodType => meth.paramInfos ::: extraParams + case _ => extraParams + } + allParams.map(paramClass) + } +end Interpreter + +object Interpreter: + /** Exception that stops interpretation if some issue is found */ + class StopInterpretation(val msg: Message, val pos: SrcPos) extends Exception + + object Call: + import tpd._ + /** Matches an expression that is either a field access or an application + * It retruns a TermRef containing field accessed or a method reference and the arguments passed to it. 
+ */ + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = + Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) + + private object Call0 { + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { + case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => + Some((fn, args)) + case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) + case fn: Select => Some((fn, Nil)) + case Apply(f @ Call0(fn, args1), args2) => + if (f.tpe.widenDealias.hasErasedParams) Some((fn, args1)) + else Some((fn, args2 :: args1)) + case TypeApply(Call0(fn, args), _) => Some((fn, args)) + case _ => None + } + } + end Call + + object MissingClassDefinedInCurrentRun { + def unapply(targetException: Throwable)(using Context): Option[Symbol] = { + if !ctx.compilationUnit.isSuspendable then None + else targetException match + case _: NoClassDefFoundError | _: ClassNotFoundException => + val className = targetException.getMessage + if className eq null then None + else + val sym = staticRef(className.toTypeName).symbol + if (sym.isDefinedInCurrentRun) Some(sym) else None + case _ => None + } + } + + def suspendOnMissing(sym: Symbol, pos: SrcPos)(using Context): Nothing = + if ctx.settings.XprintSuspension.value then + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 41f3fd4f64f3..20bcba417a5e 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -5,6 +5,7 @@ import dotty.tools.dotc.ast.{TreeTypeMap, tpd} import dotty.tools.dotc.config.Printers._ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ import 
dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ @@ -12,7 +13,7 @@ import dotty.tools.dotc.core.tasty.{ PositionPickler, TastyPickler, TastyPrinter import dotty.tools.dotc.core.tasty.DottyUnpickler import dotty.tools.dotc.core.tasty.TreeUnpickler.UnpickleMode import dotty.tools.dotc.report - +import dotty.tools.dotc.reporting.Message import scala.quoted.Quotes import scala.quoted.runtime.impl._ @@ -220,10 +221,10 @@ object PickledQuotes { treePkl.pickle(tree :: Nil) treePkl.compactify() if tree.span.exists then - val positionWarnings = new mutable.ListBuffer[String]() + val positionWarnings = new mutable.ListBuffer[Message]() val reference = ctx.settings.sourceroot.value - new PositionPickler(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference) - .picklePositions(ctx.compilationUnit.source, tree :: Nil, positionWarnings) + PositionPickler.picklePositions(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference, + ctx.compilationUnit.source, tree :: Nil, positionWarnings) positionWarnings.foreach(report.warning(_)) val pickled = pickler.assembleParts() @@ -248,23 +249,41 @@ object PickledQuotes { case pickled: String => TastyString.unpickle(pickled) case pickled: List[String] => TastyString.unpickle(pickled) - quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}") + val unpicklingContext = + if ctx.owner.isClass then + // When a quote is unpickled with a Quotes context that that has a class `spliceOwner` + // we need to use a dummy owner to unpickle it. Otherwise any definitions defined + // in the quoted block would be accidentally entered in the class. + // When splicing this expression, this owner is replaced with the correct owner (see `quotedExprToTree` and `quotedTypeToTree` above). 
+ // On the other hand, if the expression is used as a reflect term, the user must call `changeOwner` (same as with other expressions used within a nested owner). + // `-Xcheck-macros` will check for inconsistent owners and provide the users hints on how to improve them. + // + // Quotes context that that has a class `spliceOwner` can come from a macro annotation + // or a user setting it explicitly using `Symbol.asQuotes`. + ctx.withOwner(newSymbol(ctx.owner, "$quoteOwnedByClass$".toTermName, Private, defn.AnyType, NoSymbol)) + else ctx - val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term - val unpickler = new DottyUnpickler(bytes, mode) - unpickler.enter(Set.empty) + inContext(unpicklingContext) { - val tree = unpickler.tree - QuotesCache(pickled) = tree + quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}") - // Make sure trees and positions are fully loaded - new TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree) - }.traverse(tree) + val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term + val unpickler = new DottyUnpickler(bytes, mode) + unpickler.enter(Set.empty) - quotePickling.println(i"**** unpickled quote\n$tree") + val tree = unpickler.tree + QuotesCache(pickled) = tree + + // Make sure trees and positions are fully loaded + new TreeTraverser { + def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree) + }.traverse(tree) + + quotePickling.println(i"**** unpickled quote\n$tree") + + tree + } - tree } } diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 5addb11f1a3c..38f2ab347c4c 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -4,13 +4,12 @@ import reporting._ import Diagnostic._ import util.{SourcePosition, NoSourcePosition, SrcPos} import core._ -import Contexts._, Symbols._, 
Decorators._ +import Contexts._, Flags.*, Symbols._, Decorators._ import config.SourceVersion import ast._ import config.Feature.sourceVersion import java.lang.System.currentTimeMillis - object report: /** For sending messages that are printed only if -verbose is set */ @@ -18,26 +17,26 @@ object report: if ctx.settings.verbose.value then echo(msg, pos) def echo(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = - ctx.reporter.report(new Info(msg, pos.sourcePos)) + ctx.reporter.report(new Info(msg.toMessage, pos.sourcePos)) private def issueWarning(warning: Warning)(using Context): Unit = ctx.reporter.report(warning) - def deprecationWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def deprecationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new DeprecationWarning(msg, pos.sourcePos)) - def migrationWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def migrationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new MigrationWarning(msg, pos.sourcePos)) - def uncheckedWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def uncheckedWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new UncheckedWarning(msg, pos.sourcePos)) - def featureWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def featureWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new FeatureWarning(msg, pos.sourcePos)) def featureWarning(feature: String, featureDescription: => String, - featureUseSite: Symbol, required: Boolean, pos: SrcPos)(using Context): Unit = { - val req = if (required) "needs to" else "should" + featureUseSite: Symbol, required: Boolean, pos: SrcPos)(using Context): Unit = + val req = if required then "needs to" else "should" val fqname = s"scala.language.$feature" val explain = @@ -48,31 +47,45 @@ object report: |See the Scala docs for value $fqname for a 
discussion |why the feature $req be explicitly enabled.""".stripMargin - def msg = s"""$featureDescription $req be enabled - |by adding the import clause 'import $fqname' - |or by setting the compiler option -language:$feature.$explain""".stripMargin - if (required) error(msg, pos) + def msg = em"""$featureDescription $req be enabled + |by adding the import clause 'import $fqname' + |or by setting the compiler option -language:$feature.$explain""" + if required then error(msg, pos) else issueWarning(new FeatureWarning(msg, pos.sourcePos)) - } + end featureWarning - def warning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def warning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new Warning(msg, addInlineds(pos))) - def error(msg: Message, pos: SrcPos = NoSourcePosition, sticky: Boolean = false)(using Context): Unit = + def warning(msg: Message)(using Context): Unit = + warning(msg, NoSourcePosition) + + def warning(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + warning(msg.toMessage, pos) + + def error(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = val fullPos = addInlineds(pos) - ctx.reporter.report(if (sticky) new StickyError(msg, fullPos) else new Error(msg, fullPos)) + ctx.reporter.report(new Error(msg, fullPos)) if ctx.settings.YdebugError.value then Thread.dumpStack() + def error(msg: => String, pos: SrcPos)(using Context): Unit = + error(msg.toMessage, pos) + + def error(msg: => String)(using Context): Unit = + error(msg, NoSourcePosition) + def error(ex: TypeError, pos: SrcPos)(using Context): Unit = - error(ex.toMessage, pos, sticky = true) + val fullPos = addInlineds(pos) + ctx.reporter.report(new StickyError(ex.toMessage, fullPos)) + if ctx.settings.YdebugError.value then Thread.dumpStack() if ctx.settings.YdebugTypeError.value then ex.printStackTrace() - def errorOrMigrationWarning(msg: Message, pos: SrcPos = NoSourcePosition, from: SourceVersion)(using Context): 
Unit = + def errorOrMigrationWarning(msg: Message, pos: SrcPos, from: SourceVersion)(using Context): Unit = if sourceVersion.isAtLeast(from) then if sourceVersion.isMigrating && sourceVersion.ordinal <= from.ordinal then migrationWarning(msg, pos) else error(msg, pos) - def gradualErrorOrMigrationWarning(msg: Message, pos: SrcPos = NoSourcePosition, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = + def gradualErrorOrMigrationWarning(msg: Message, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = if sourceVersion.isAtLeast(errorFrom) then errorOrMigrationWarning(msg, pos, errorFrom) else if sourceVersion.isAtLeast(warnFrom) then warning(msg, pos) @@ -115,4 +128,64 @@ object report: case Nil => pos recur(pos.sourcePos, tpd.enclosingInlineds) + private object messageRendering extends MessageRendering + + // Should only be called from Run#enrichErrorMessage. + def enrichErrorMessage(errorMessage: String)(using Context): String = try { + def formatExplain(pairs: List[(String, Any)]) = pairs.map((k, v) => f"$k%20s: $v").mkString("\n") + + val settings = ctx.settings.userSetSettings(ctx.settingsState).sortBy(_.name) + val tree = ctx.tree + val sym = tree.symbol + val pos = tree.sourcePos + val path = pos.source.path + val site = ctx.outersIterator.map(_.owner).filter(sym => !sym.exists || sym.isClass || sym.is(Method)).next() + + import untpd.* + extension (tree: Tree) def summaryString: String = tree match + case Literal(const) => s"Literal($const)" + case Ident(name) => s"Ident(${name.decode})" + case Select(qual, name) => s"Select(${qual.summaryString}, ${name.decode})" + case tree: NameTree => (if tree.isType then "type " else "") + tree.name.decode + case tree => s"${tree.className}${if tree.symbol.exists then s"(${tree.symbol})" else ""}" + + val info1 = formatExplain(List( + "while compiling" -> ctx.compilationUnit, + "during phase" -> ctx.phase.prevMega, + "mode" -> ctx.mode, + "library version" -> 
scala.util.Properties.versionString, + "compiler version" -> dotty.tools.dotc.config.Properties.versionString, + "settings" -> settings.map(s => if s.value == "" then s"${s.name} \"\"" else s"${s.name} ${s.value}").mkString(" "), + )) + val symbolInfos = if sym eq NoSymbol then List("symbol" -> sym) else List( + "symbol" -> sym.showLocated, + "symbol definition" -> s"${sym.showDcl} (a ${sym.className})", + "symbol package" -> sym.enclosingPackageClass.fullName, + "symbol owners" -> sym.showExtendedLocation, + ) + val info2 = formatExplain(List( + "tree" -> tree.summaryString, + "tree position" -> (if pos.exists then s"$path:${pos.line + 1}:${pos.column}" else s"$path:"), + "tree type" -> tree.typeOpt.show, + ) ::: symbolInfos ::: List( + "call site" -> s"${site.showLocated} in ${site.enclosingPackageClass}" + )) + val context_s = try + s""" == Source file context for tree position == + | + |${messageRendering.messageAndPos(Diagnostic.Error("", pos))}""".stripMargin + catch case _: Exception => "" + s""" + | $errorMessage + | + | An unhandled exception was thrown in the compiler. 
+ | Please file a crash report here: + | https://github.com/lampepfl/dotty/issues/new/choose + | + |$info1 + | + |$info2 + | + |$context_s""".stripMargin + } catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions end report diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala index dec13a4f5925..624aa93924e8 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala @@ -9,8 +9,9 @@ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING} import dotty.tools.dotc.util.SourcePosition -import java.util.Optional +import java.util.{Collections, Optional, List => JList} import scala.util.chaining._ +import core.Decorators.toMessage object Diagnostic: @@ -23,7 +24,8 @@ object Diagnostic: class Error( msg: Message, pos: SourcePosition - ) extends Diagnostic(msg, pos, ERROR) + ) extends Diagnostic(msg, pos, ERROR): + def this(str: => String, pos: SourcePosition) = this(str.toMessage, pos) /** A sticky error is an error that should not be hidden by backtracking and * trying some alternative path. 
Typically, errors issued after catching @@ -46,7 +48,8 @@ object Diagnostic: class Info( msg: Message, pos: SourcePosition - ) extends Diagnostic(msg, pos, INFO) + ) extends Diagnostic(msg, pos, INFO): + def this(str: => String, pos: SourcePosition) = this(str.toMessage, pos) abstract class ConditionalWarning( msg: Message, @@ -86,7 +89,7 @@ class Diagnostic( val msg: Message, val pos: SourcePosition, val level: Int -) extends Exception with interfaces.Diagnostic: +) extends interfaces.Diagnostic: private var verbose: Boolean = false def isVerbose: Boolean = verbose def setVerbose(): this.type = @@ -97,7 +100,8 @@ class Diagnostic( if (pos.exists && pos.source.exists) Optional.of(pos) else Optional.empty() override def message: String = msg.message.replaceAll("\u001B\\[[;\\d]*m", "") + override def diagnosticRelatedInformation: JList[interfaces.DiagnosticRelatedInformation] = + Collections.emptyList() override def toString: String = s"$getClass at $pos: $message" - override def getMessage(): String = message end Diagnostic diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 7b22eb77e90e..1fe38ce5e801 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -67,7 +67,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case AmbiguousOverloadID // errorNumber: 51 case ReassignmentToValID // errorNumber: 52 case TypeDoesNotTakeParametersID // errorNumber: 53 - case ParameterizedTypeLacksArgumentsID // errorNumber: 54 + case ParameterizedTypeLacksArgumentsID extends ErrorMessageID(isActive = false) // errorNumber: 54 case VarValParametersMayNotBeCallByNameID // errorNumber: 55 case MissingTypeParameterForID // errorNumber: 56 case DoesNotConformToBoundID // errorNumber: 57 @@ -176,7 +176,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends 
java.lang.Enum[ErrorMe case JavaEnumParentArgsID // errorNumber: 160 case AlreadyDefinedID // errorNumber: 161 case CaseClassInInlinedCodeID // errorNumber: 162 - case OverrideTypeMismatchErrorID // errorNumber: 163 + case OverrideTypeMismatchErrorID extends ErrorMessageID(isActive = false) // errorNumber: 163 case OverrideErrorID // errorNumber: 164 case MatchableWarningID // errorNumber: 165 case CannotExtendFunctionID // errorNumber: 166 @@ -184,7 +184,15 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case ImplicitSearchTooLargeID // errorNumber: 168 case TargetNameOnTopLevelClassID // errorNumber: 169 case NotClassTypeID // errorNumber 170 - + case MissingArgumentID // errorNumer 171 + case MissingImplicitArgumentID // errorNumber 172 + case CannotBeAccessedID // errorNumber 173 + case InlineGivenShouldNotBeFunctionID // errorNumber 174 + case ValueDiscardingID // errorNumber 175 + case UnusedNonUnitValueID // errorNumber 176 + case ConstrProxyShadowsID // errorNumber 177 + case MissingArgumentListID // errorNumber: 178 + def errorNumber = ordinal - 1 object ErrorMessageID: diff --git a/compiler/src/dotty/tools/dotc/reporting/Message.scala b/compiler/src/dotty/tools/dotc/reporting/Message.scala index 77e1336a990c..a1fe6773c1d2 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Message.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Message.scala @@ -2,23 +2,35 @@ package dotty.tools package dotc package reporting -import core.Contexts.*, core.Decorators.*, core.Mode +import core.* +import Contexts.*, Decorators.*, Symbols.*, Types.*, Flags.* +import printing.{RefinedPrinter, MessageLimiter, ErrorMessageLimiter} +import printing.Texts.Text +import printing.Formatting.hl import config.SourceVersion import scala.language.unsafeNulls import scala.annotation.threadUnsafe -object Message { - val nonSensicalStartTag: String = "" - val nonSensicalEndTag: String = "" - - /** This implicit conversion provides a fallback for 
error messages that have - * not yet been ported to the new scheme. Comment out this `implicit def` to - * see where old errors still exist - */ - implicit def toNoExplanation(str: => String): Message = NoExplanation(str) - +/** ## Tips for error message generation + * + * - You can use the `em` interpolator for error messages. It's defined in core.Decorators. + * - You can also use a simple string argument for `error` or `warning` (not for the other variants), + * but the string should not be interpolated or composed of objects that require a + * Context for evaluation. + * - When embedding interpolated substrings defined elsewhere in error messages, + * use `i` and make sure they are defined as def's instead of vals. That way, the + * possibly expensive interpolation will performed only in the case where the message + * is eventually printed. Note: At least during typer, it's common for messages + * to be discarded without being printed. Also, by making them defs, you ensure that + * they will be evaluated in the Message context, which makes formatting safer + * and more robust. + * - For common messages, or messages that might require explanation, prefer defining + * a new `Message` class in file `messages.scala` and use that instead. The advantage is that these + * messages have unique IDs that can be referenced elsewhere. + */ +object Message: def rewriteNotice(what: String, version: SourceVersion | Null = null, options: String = "")(using Context): String = if !ctx.mode.is(Mode.Interactive) then val sourceStr = if version != null then i"-source $version" else "" @@ -28,7 +40,188 @@ object Message { else i"$sourceStr $options" i"\n$what can be rewritten automatically under -rewrite $optionStr." 
else "" -} + + private type Recorded = Symbol | ParamRef | SkolemType + + private case class SeenKey(str: String, isType: Boolean) + + /** A class that records printed items of one of the types in `Recorded`, + * adds superscripts for disambiguations, and can explain recorded symbols + * in ` where` clause + */ + private class Seen(disambiguate: Boolean): + + val seen = new collection.mutable.HashMap[SeenKey, List[Recorded]]: + override def default(key: SeenKey) = Nil + + var nonSensical = false + + /** If false, stop all recordings */ + private var recordOK = disambiguate + + /** Clear all entries and stop further entries to be added */ + def disable() = + seen.clear() + recordOK = false + + /** Record an entry `entry` with given String representation `str` and a + * type/term namespace identified by `isType`. + * If the entry was not yet recorded, allocate the next superscript corresponding + * to the same string in the same name space. The first recording is the string proper + * and following recordings get consecutive superscripts starting with 2. + * @return The possibly superscripted version of `str`. + */ + def record(str: String, isType: Boolean, entry: Recorded)(using Context): String = + if !recordOK then return str + //println(s"recording $str, $isType, $entry") + + /** If `e1` is an alias of another class of the same name, return the other + * class symbol instead. This normalization avoids recording e.g. 
scala.List + * and scala.collection.immutable.List as two different types + */ + def followAlias(e1: Recorded): Recorded = e1 match { + case e1: Symbol if e1.isAliasType => + val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol + if (underlying.name == e1.name) underlying else e1 + case _ => e1 + } + val key = SeenKey(str, isType) + val existing = seen(key) + lazy val dealiased = followAlias(entry) + + // alts: The alternatives in `existing` that are equal, or follow (an alias of) `entry` + var alts = existing.dropWhile(alt => dealiased ne followAlias(alt)) + if alts.isEmpty then + alts = entry :: existing + seen(key) = alts + + val suffix = alts.length match { + case 1 => "" + case n => n.toString.toCharArray.map { + case '0' => '⁰' + case '1' => '¹' + case '2' => '²' + case '3' => '³' + case '4' => '⁴' + case '5' => '⁵' + case '6' => '⁶' + case '7' => '⁷' + case '8' => '⁸' + case '9' => '⁹' + }.mkString + } + str + suffix + end record + + /** Create explanation for single `Recorded` type or symbol */ + private def explanation(entry: AnyRef)(using Context): String = + def boundStr(bound: Type, default: ClassSymbol, cmp: String) = + if (bound.isRef(default)) "" else i"$cmp $bound" + + def boundsStr(bounds: TypeBounds): String = { + val lo = boundStr(bounds.lo, defn.NothingClass, ">:") + val hi = boundStr(bounds.hi, defn.AnyClass, "<:") + if (lo.isEmpty) hi + else if (hi.isEmpty) lo + else s"$lo and $hi" + } + + def addendum(cat: String, info: Type): String = info match { + case bounds @ TypeBounds(lo, hi) if bounds ne TypeBounds.empty => + if (lo eq hi) i" which is an alias of $lo" + else i" with $cat ${boundsStr(bounds)}" + case _ => + "" + } + + entry match { + case param: TypeParamRef => + s"is a type variable${addendum("constraint", TypeComparer.bounds(param))}" + case param: TermParamRef => + s"is a reference to a value parameter" + case sym: Symbol => + val info = + if (ctx.gadt.contains(sym)) + sym.info & ctx.gadt.fullBounds(sym) + 
else + sym.info + s"is a ${ctx.printer.kindString(sym)}${sym.showExtendedLocation}${addendum("bounds", info)}" + case tp: SkolemType => + s"is an unknown value of type ${tp.widen.show}" + } + end explanation + + /** Produce a where clause with explanations for recorded iterms. + */ + def explanations(using Context): String = + def needsExplanation(entry: Recorded) = entry match { + case param: TypeParamRef => ctx.typerState.constraint.contains(param) + case param: ParamRef => false + case skolem: SkolemType => true + case sym: Symbol => + ctx.gadt.contains(sym) && ctx.gadt.fullBounds(sym) != TypeBounds.empty + } + + val toExplain: List[(String, Recorded)] = seen.toList.flatMap { kvs => + val res: List[(String, Recorded)] = kvs match { + case (key, entry :: Nil) => + if (needsExplanation(entry)) (key.str, entry) :: Nil else Nil + case (key, entries) => + for (alt <- entries) yield { + val tickedString = record(key.str, key.isType, alt) + (tickedString, alt) + } + } + res // help the inferrencer out + }.sortBy(_._1) + + def columnar(parts: List[(String, String)]): List[String] = { + lazy val maxLen = parts.map(_._1.length).max + parts.map { + case (leader, trailer) => + val variable = hl(leader) + s"""$variable${" " * (maxLen - leader.length)} $trailer""" + } + } + + val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry)) } + val explainLines = columnar(explainParts) + if (explainLines.isEmpty) "" else i"where: $explainLines%\n %\n" + end explanations + end Seen + + /** Printer to be used when formatting messages */ + private class Printer(val seen: Seen, _ctx: Context) extends RefinedPrinter(_ctx): + + /** True if printer should a show source module instead of its module class */ + private def useSourceModule(sym: Symbol): Boolean = + sym.is(ModuleClass, butNot = Package) && sym.sourceModule.exists && !_ctx.settings.YdebugNames.value + + override def simpleNameString(sym: Symbol): String = + if useSourceModule(sym) then 
simpleNameString(sym.sourceModule) + else seen.record(super.simpleNameString(sym), sym.isType, sym) + + override def ParamRefNameString(param: ParamRef): String = + seen.record(super.ParamRefNameString(param), param.isInstanceOf[TypeParamRef], param) + + override def toTextRef(tp: SingletonType): Text = tp match + case tp: SkolemType => seen.record(tp.repr.toString, isType = true, tp) + case _ => super.toTextRef(tp) + + override def toText(tp: Type): Text = + if !tp.exists || tp.isErroneous then seen.nonSensical = true + tp match + case tp: TypeRef if useSourceModule(tp.symbol) => Str("object ") ~ super.toText(tp) + case _ => super.toText(tp) + + override def toText(sym: Symbol): Text = + sym.infoOrCompleter match + case _: ErrorType | TypeAlias(_: ErrorType) | NoType => seen.nonSensical = true + case _ => + super.toText(sym) + end Printer + +end Message /** A `Message` contains all semantic information necessary to easily * comprehend what caused the message to be logged. Each message can be turned @@ -45,9 +238,41 @@ object Message { * * @param errorId a unique id identifying the message, this will be * used to reference documentation online + * + * Messages modify the rendendering of interpolated strings in several ways: + * + * 1. The size of the printed code is limited with a MessafeLimiter. If the message + * would get too large or too deeply nested, a `...` is printed instead. + * 2. References to module classes are prefixed with `object ` for better recogniability. + * 3. A where clause is sometimes added which contains the following additional explanations: + * - Rerences are disambiguated: If a message contains occurrences of the same identifier + * representing different symbols, the duplicates are printed with superscripts + * and the where-clause explains where each symbol is located. + * - Uninstantiated variables are explained in the where-clause with additional + * info about their bounds. 
+ * - Skolems are explained with additional info about their underlying type. + * + * Messages inheriting from the NoDisambiguation trait or returned from the + * `noDisambiguation()` method skip point (3) above. This makes sense if the + * message already exolains where different occurrences of the same identifier + * are located. Examples are NamingMsgs such as double definition errors, + * overriding errors, and ambiguous implicit errors. + * + * We consciously made the design decision to disambiguate by default and disable + * disambiguation as an opt-in. The reason is that one usually does not consider all + * fine-grained details when writing an error message. If disambiguation is the default, + * some tests will show where clauses that look too noisy and that then can be disabled + * when needed. But if silence is the default, one usually does not realize that + * better info could be obtained by turning disambiguation on. */ -abstract class Message(val errorId: ErrorMessageID) { self => - import Message._ +abstract class Message(val errorId: ErrorMessageID)(using Context) { self => + import Message.* + + /** The kind of the error message, e.g. "Syntax" or "Type Mismatch". + * This will be printed as "$kind Error", "$kind Warning", etc, on the first + * line of the message. + */ + def kind: MessageKind /** The `msg` contains the diagnostic message e.g: * @@ -58,22 +283,27 @@ abstract class Message(val errorId: ErrorMessageID) { self => * `Diagnostic`. The message is given in raw form, with possible embedded * tags. */ - protected def msg: String - - /** The kind of the error message, e.g. "Syntax" or "Type Mismatch". - * This will be printed as "$kind Error", "$kind Warning", etc, on the first - * line of the message. - */ - def kind: MessageKind + protected def msg(using Context): String /** The explanation should provide a detailed description of why the error * occurred and use examples from the user's own code to illustrate how to * avoid these errors. 
It might contain embedded tags. */ - protected def explain: String + protected def explain(using Context): String - /** A message suffix that can be added for certain subclasses */ - protected def msgSuffix: String = "" + /** What gets printed after the message proper */ + protected def msgPostscript(using Context): String = + if ctx eq NoContext then "" + else ctx.printer match + case msgPrinter: Message.Printer => + myIsNonSensical = msgPrinter.seen.nonSensical + val addendum = msgPrinter.seen.explanations + msgPrinter.seen.disable() + // Clear entries and stop futher recording so that messages containing the current + // one don't repeat the explanations or use explanations from the msgPostscript. + if addendum.isEmpty then "" else "\n\n" ++ addendum + case _ => + "" /** Does this message have an explanation? * This is normally the same as `explain.nonEmpty` but can be overridden @@ -82,61 +312,69 @@ abstract class Message(val errorId: ErrorMessageID) { self => */ def canExplain: Boolean = explain.nonEmpty - private var myMsg: String | Null = null private var myIsNonSensical: Boolean = false - private def dropNonSensical(msg: String): String = - if msg.contains(nonSensicalStartTag) then - myIsNonSensical = true - // myMsg might be composed of several d"..." invocations -> nested - // nonsensical tags possible - msg - .replace(nonSensicalStartTag, "") - .replace(nonSensicalEndTag, "") - else msg + /** A message is non-sensical if it contains references to internally + * generated error types. Normally we want to suppress error messages + * referring to types like this because they look weird and are normally + * follow-up errors to something that was diagnosed before. 
+ */ + def isNonSensical: Boolean = { message; myIsNonSensical } + + private var disambiguate: Boolean = true + + def withoutDisambiguation(): this.type = + disambiguate = false + this - /** The message with potential embedded tags */ - def rawMessage = message + private def inMessageContext(disambiguate: Boolean)(op: Context ?=> String): String = + if ctx eq NoContext then op + else + val msgContext = ctx.printer match + case _: Message.Printer => ctx + case _ => + val seen = Seen(disambiguate) + val ctx1 = ctx.fresh.setPrinterFn(Message.Printer(seen, _)) + if !ctx1.property(MessageLimiter).isDefined then + ctx1.setProperty(MessageLimiter, ErrorMessageLimiter()) + ctx1 + op(using msgContext) /** The message to report. tags are filtered out */ - @threadUnsafe lazy val message: String = dropNonSensical(msg + msgSuffix) + @threadUnsafe lazy val message: String = + inMessageContext(disambiguate)(msg + msgPostscript) /** The explanation to report. tags are filtered out */ - @threadUnsafe lazy val explanation: String = dropNonSensical(explain) - - /** A message is non-sensical if it contains references to - * tags. Such tags are inserted by the error diagnostic framework if a - * message contains references to internally generated error types. Normally - * we want to suppress error messages referring to types like this because - * they look weird and are normally follow-up errors to something that was - * diagnosed before. - */ - def isNonSensical: Boolean = { message; myIsNonSensical } + @threadUnsafe lazy val explanation: String = + inMessageContext(disambiguate = false)(explain) /** The implicit `Context` in messages is a large thing that we don't want * persisted. This method gets around that by duplicating the message, * forcing its `msg` and `explanation` vals and dropping the implicit context * that was captured in the original message. 
*/ - def persist: Message = new Message(errorId) { - val kind = self.kind - val msg = self.msg - val explain = self.explain + def persist: Message = new Message(errorId)(using NoContext): + val kind = self.kind + private val persistedMsg = self.message + private val persistedExplain = self.explanation + def msg(using Context) = persistedMsg + def explain(using Context) = persistedExplain override val canExplain = self.canExplain - } + override def isNonSensical = self.isNonSensical def append(suffix: => String): Message = mapMsg(_ ++ suffix) + def prepend(prefix: => String): Message = mapMsg(prefix ++ _) def mapMsg(f: String => String): Message = new Message(errorId): - val kind = self.kind - def msg = f(self.msg) - def explain = self.explain + val kind = self.kind + def msg(using Context) = f(self.msg) + def explain(using Context) = self.explain override def canExplain = self.canExplain def appendExplanation(suffix: => String): Message = new Message(errorId): - val kind = self.kind - def msg = self.msg - def explain = self.explain ++ suffix + val kind = self.kind + def msg(using Context) = self.msg + def explain(using Context) = self.explain ++ suffix override def canExplain = true /** Override with `true` for messages that should always be shown even if their @@ -149,10 +387,14 @@ abstract class Message(val errorId: ErrorMessageID) { self => override def toString = msg } +/** A marker trait that suppresses generation of `where` clause for disambiguations */ +trait NoDisambiguation extends Message: + withoutDisambiguation() + /** The fallback `Message` containing no explanation and having no `kind` */ -class NoExplanation(msgFn: => String) extends Message(ErrorMessageID.NoExplanationID) { - def msg: String = msgFn - def explain: String = "" +final class NoExplanation(msgFn: Context ?=> String)(using Context) extends Message(ErrorMessageID.NoExplanationID) { + def msg(using Context): String = msgFn + def explain(using Context): String = "" val kind: MessageKind = 
MessageKind.NoKind override def toString(): String = msg diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 0d5acaef4960..f5aadac27296 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -14,6 +14,7 @@ import dotty.tools.dotc.util.NoSourcePosition import java.io.{BufferedReader, PrintWriter} import scala.annotation.internal.sharable import scala.collection.mutable +import core.Decorators.em object Reporter { /** Convert a SimpleReporter into a real Reporter */ @@ -217,7 +218,7 @@ abstract class Reporter extends interfaces.ReporterResult { def summarizeUnreportedWarnings()(using Context): Unit = for (settingName, count) <- unreportedWarnings do val were = if count == 1 then "was" else "were" - val msg = s"there $were ${countString(count, settingName.tail + " warning")}; re-run with $settingName for details" + val msg = em"there $were ${countString(count, settingName.tail + " warning")}; re-run with $settingName for details" report(Warning(msg, NoSourcePosition)) /** Print the summary of warnings and errors */ diff --git a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala index ad47a9d30536..153212522541 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala @@ -6,12 +6,16 @@ import core.Contexts._ import Diagnostic.Error /** - * This class implements a Reporter that throws all errors and sends warnings and other - * info to the underlying reporter. + * This class implements a Reporter that throws all errors as UnhandledError exceptions + * and sends warnings and other info to the underlying reporter. 
*/ class ThrowingReporter(reportInfo: Reporter) extends Reporter { def doReport(dia: Diagnostic)(using Context): Unit = dia match { - case _: Error => throw dia + case dia: Error => throw UnhandledError(dia) case _ => reportInfo.doReport(dia) } } + +class UnhandledError(val diagnostic: Error) extends Exception: + override def getMessage = diagnostic.message + diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index 21e10e894e0b..af1a5c0f0f47 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -18,7 +18,7 @@ enum MessageFilter: case Feature => message.isInstanceOf[Diagnostic.FeatureWarning] case Unchecked => message.isInstanceOf[Diagnostic.UncheckedWarning] case MessagePattern(pattern) => - val noHighlight = message.msg.rawMessage.replaceAll("\\e\\[[\\d;]*[^\\d;]","") + val noHighlight = message.msg.message.replaceAll("\\e\\[[\\d;]*[^\\d;]","") pattern.findFirstIn(noHighlight).nonEmpty case MessageID(errorId) => message.msg.errorId == errorId case None => false diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index a3af4c1b2582..fba08fd84d0c 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -15,9 +15,10 @@ import printing.Formatting import ErrorMessageID._ import ast.Trees import config.{Feature, ScalaVersion} -import typer.ErrorReporting.{err, matchReductionAddendum} +import typer.ErrorReporting.{err, matchReductionAddendum, substitutableTypeSymbolsInScope} import typer.ProtoTypes.ViewProto -import typer.Implicits.Candidate +import typer.Implicits.* +import typer.Inferencing import scala.util.control.NonFatal import StdNames.nme import printing.Formatting.hl @@ -25,6 +26,8 @@ import ast.Trees._ import ast.untpd import ast.tpd import transform.SymUtils._ +import 
scala.util.matching.Regex +import java.util.regex.Matcher.quoteReplacement import cc.CaptureSet.IdentityCaptRefMap /** Messages @@ -40,211 +43,212 @@ import cc.CaptureSet.IdentityCaptRefMap * ``` */ - abstract class SyntaxMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Syntax +abstract class SyntaxMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.Syntax - abstract class TypeMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Type +abstract class TypeMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.Type - trait ShowMatchTrace(tps: Type*)(using Context) extends Message: - override def msgSuffix: String = matchReductionAddendum(tps*) +trait ShowMatchTrace(tps: Type*)(using Context) extends Message: + override def msgPostscript(using Context): String = + super.msgPostscript ++ matchReductionAddendum(tps*) - abstract class TypeMismatchMsg(found: Type, expected: Type)(errorId: ErrorMessageID)(using Context) - extends Message(errorId), ShowMatchTrace(found, expected): - def kind = MessageKind.TypeMismatch - def explain = err.whyNoMatchStr(found, expected) - override def canExplain = true +abstract class TypeMismatchMsg(found: Type, expected: Type)(errorId: ErrorMessageID)(using Context) +extends Message(errorId), ShowMatchTrace(found, expected): + def kind = MessageKind.TypeMismatch + def explain(using Context) = err.whyNoMatchStr(found, expected) + override def canExplain = true - abstract class NamingMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Naming +abstract class NamingMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId), NoDisambiguation: + def kind = MessageKind.Naming - abstract class DeclarationMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Declaration +abstract class DeclarationMsg(errorId: ErrorMessageID)(using Context) extends 
Message(errorId): + def kind = MessageKind.Declaration - /** A simple not found message (either for idents, or member selection. - * Messages of this class are sometimes dropped in favor of other, more - * specific messages. - */ - abstract class NotFoundMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.NotFound - def name: Name - - abstract class PatternMatchMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.PatternMatch +/** A simple not found message (either for idents, or member selection. + * Messages of this class are sometimes dropped in favor of other, more + * specific messages. + */ +abstract class NotFoundMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.NotFound + def name: Name - abstract class CyclicMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Cyclic +abstract class PatternMatchMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.PatternMatch - abstract class ReferenceMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Reference +abstract class CyclicMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.Cyclic - abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(using Context) - extends SyntaxMsg(errNo) { - def explain = { - val tryString = tryBody match { - case Block(Nil, untpd.EmptyTree) => "{}" - case _ => tryBody.show - } +abstract class ReferenceMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.Reference - val code1 = - s"""|import scala.util.control.NonFatal - | - |try $tryString catch { - | case NonFatal(e) => ??? - |}""".stripMargin - - val code2 = - s"""|try $tryString finally { - | // perform your cleanup here! 
- |}""".stripMargin - - em"""|A ${hl("try")} expression should be followed by some mechanism to handle any exceptions - |thrown. Typically a ${hl("catch")} expression follows the ${hl("try")} and pattern matches - |on any expected exceptions. For example: - | - |$code1 - | - |It is also possible to follow a ${hl("try")} immediately by a ${hl("finally")} - letting the - |exception propagate - but still allowing for some clean up in ${hl("finally")}: - | - |$code2 - | - |It is recommended to use the ${hl("NonFatal")} extractor to catch all exceptions as it - |correctly handles transfer functions like ${hl("return")}.""" +abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(using Context) +extends SyntaxMsg(errNo) { + def explain(using Context) = { + val tryString = tryBody match { + case Block(Nil, untpd.EmptyTree) => "{}" + case _ => tryBody.show } - } - - class EmptyCatchBlock(tryBody: untpd.Tree)(using Context) - extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchBlockID) { - def msg = - em"""|The ${hl("catch")} block does not contain a valid expression, try - |adding a case like - ${hl("case e: Exception =>")} to the block""" - } - - class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(using Context) - extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) { - def msg = - em"""|A ${hl("try")} without ${hl("catch")} or ${hl("finally")} is equivalent to putting - |its body in a block; no exceptions are handled.""" - } - class DeprecatedWithOperator()(using Context) - extends SyntaxMsg(DeprecatedWithOperatorID) { - def msg = - em"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead""" - def explain = - em"""|Dotty introduces intersection types - ${hl("&")} types. These replace the - |use of the ${hl("with")} keyword. 
There are a few differences in - |semantics between intersection types and using ${hl("with")}.""" - } - - class CaseClassMissingParamList(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(CaseClassMissingParamListID) { - def msg = - em"""|A ${hl("case class")} must have at least one parameter list""" - - def explain = - em"""|${cdef.name} must have at least one parameter list, if you would rather - |have a singleton representation of ${cdef.name}, use a "${hl("case object")}". - |Or, add an explicit ${hl("()")} as a parameter list to ${cdef.name}.""" - } - - class AnonymousFunctionMissingParamType(param: untpd.ValDef, - tree: untpd.Function, - pt: Type) - (using Context) - extends TypeMsg(AnonymousFunctionMissingParamTypeID) { - def msg = { - val ofFun = - if param.name.is(WildcardParamName) - || (MethodType.syntheticParamNames(tree.args.length + 1) contains param.name) - then i" of expanded function:\n$tree" - else "" + val code1 = + s"""|import scala.util.control.NonFatal + | + |try $tryString catch { + | case NonFatal(e) => ??? + |}""".stripMargin - val inferred = - if (pt == WildcardType) "" - else i"\nWhat I could infer was: $pt" + val code2 = + s"""|try $tryString finally { + | // perform your cleanup here! + |}""".stripMargin - i"""Missing parameter type - | - |I could not infer the type of the parameter ${param.name}$ofFun.$inferred""" - } + i"""|A ${hl("try")} expression should be followed by some mechanism to handle any exceptions + |thrown. Typically a ${hl("catch")} expression follows the ${hl("try")} and pattern matches + |on any expected exceptions. 
For example: + | + |$code1 + | + |It is also possible to follow a ${hl("try")} immediately by a ${hl("finally")} - letting the + |exception propagate - but still allowing for some clean up in ${hl("finally")}: + | + |$code2 + | + |It is recommended to use the ${hl("NonFatal")} extractor to catch all exceptions as it + |correctly handles transfer functions like ${hl("return")}.""" + } +} + +class EmptyCatchBlock(tryBody: untpd.Tree)(using Context) +extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchBlockID) { + def msg(using Context) = + i"""|The ${hl("catch")} block does not contain a valid expression, try + |adding a case like - ${hl("case e: Exception =>")} to the block""" +} + +class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(using Context) +extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) { + def msg(using Context) = + i"""|A ${hl("try")} without ${hl("catch")} or ${hl("finally")} is equivalent to putting + |its body in a block; no exceptions are handled.""" +} + +class DeprecatedWithOperator()(using Context) +extends SyntaxMsg(DeprecatedWithOperatorID) { + def msg(using Context) = + i"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead""" + def explain(using Context) = + i"""|Dotty introduces intersection types - ${hl("&")} types. These replace the + |use of the ${hl("with")} keyword. There are a few differences in + |semantics between intersection types and using ${hl("with")}.""" +} + +class CaseClassMissingParamList(cdef: untpd.TypeDef)(using Context) +extends SyntaxMsg(CaseClassMissingParamListID) { + def msg(using Context) = + i"""|A ${hl("case class")} must have at least one parameter list""" + + def explain(using Context) = + i"""|${cdef.name} must have at least one parameter list, if you would rather + |have a singleton representation of ${cdef.name}, use a "${hl("case object")}". 
+ |Or, add an explicit ${hl("()")} as a parameter list to ${cdef.name}.""" +} + +class AnonymousFunctionMissingParamType(param: untpd.ValDef, + tree: untpd.Function, + pt: Type) + (using Context) +extends TypeMsg(AnonymousFunctionMissingParamTypeID) { + def msg(using Context) = { + val ofFun = + if param.name.is(WildcardParamName) + || (MethodType.syntheticParamNames(tree.args.length + 1) contains param.name) + then i" of expanded function:\n$tree" + else "" - def explain = "" - } + val inferred = + if (pt == WildcardType) "" + else i"\nWhat I could infer was: $pt" - class WildcardOnTypeArgumentNotAllowedOnNew()(using Context) - extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID) { - def msg = "Type argument must be fully defined" - def explain = - val code1: String = - """ - |object TyperDemo { - | class Team[A] - | val team = new Team[?] - |} - """.stripMargin + i"""Missing parameter type + | + |I could not infer the type of the parameter ${param.name}$ofFun.$inferred""" + } + + def explain(using Context) = "" +} + +class WildcardOnTypeArgumentNotAllowedOnNew()(using Context) +extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID) { + def msg(using Context) = "Type argument must be fully defined" + def explain(using Context) = + val code1: String = + """ + |object TyperDemo { + | class Team[A] + | val team = new Team[?] + |} + """.stripMargin - val code2: String = - """ - |object TyperDemo { - | class Team[A] - | val team = new Team[Int] - |} - """.stripMargin - em"""|Wildcard on arguments is not allowed when declaring a new type. - | - |Given the following example: - | - |$code1 - | - |You must complete all the type parameters, for instance: - | - |$code2 """ - } + val code2: String = + """ + |object TyperDemo { + | class Team[A] + | val team = new Team[Int] + |} + """.stripMargin + i"""|Wildcard on arguments is not allowed when declaring a new type. 
+ | + |Given the following example: + | + |$code1 + | + |You must complete all the type parameters, for instance: + | + |$code2 """ +} - // Type Errors ------------------------------------------------------------ // - class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(using Context) - extends NamingMsg(DuplicateBindID) { - def msg = em"duplicate pattern variable: ${bind.name}" +// Type Errors ------------------------------------------------------------ // +class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(using Context) +extends NamingMsg(DuplicateBindID) { + def msg(using Context) = i"duplicate pattern variable: ${bind.name}" - def explain = { - val pat = tree.pat.show - val guard = tree.guard match { - case untpd.EmptyTree => "" - case guard => s"if ${guard.show}" - } + def explain(using Context) = { + val pat = tree.pat.show + val guard = tree.guard match + case untpd.EmptyTree => "" + case guard => s"if ${guard.show}" - val body = tree.body match { - case Block(Nil, untpd.EmptyTree) => "" - case body => s" ${body.show}" - } + val body = tree.body match { + case Block(Nil, untpd.EmptyTree) => "" + case body => s" ${body.show}" + } - val caseDef = s"case $pat$guard => $body" + val caseDef = s"case $pat$guard => $body" - em"""|For each ${hl("case")} bound variable names have to be unique. In: - | - |$caseDef - | - |${bind.name} is not unique. Rename one of the bound variables!""" - } + i"""|For each ${hl("case")} bound variable names have to be unique. In: + | + |$caseDef + | + |${bind.name} is not unique. Rename one of the bound variables!""" } +} - class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name)(using Context) - extends NotFoundMsg(MissingIdentID) { - def msg = em"Not found: $treeKind$name" - def explain = { - em"""|The identifier for `$treeKind$name` is not bound, that is, - |no declaration for this identifier can be found. 
- |That can happen, for example, if `$name` or its declaration has either been - |misspelt or if an import is missing.""" - } +class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name)(using Context) +extends NotFoundMsg(MissingIdentID) { + def msg(using Context) = i"Not found: $treeKind$name" + def explain(using Context) = { + i"""|The identifier for `$treeKind$name` is not bound, that is, + |no declaration for this identifier can be found. + |That can happen, for example, if `$name` or its declaration has either been + |misspelt or if an import is missing.""" } +} - class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) - extends TypeMismatchMsg(found, expected)(TypeMismatchID): +class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) + extends TypeMismatchMsg(found, expected)(TypeMismatchID): + def msg(using Context) = // replace constrained TypeParamRefs and their typevars by their bounds where possible // and the bounds are not f-bounds. 
// The idea is that if the bounds are also not-subtypes of each other to report @@ -272,2266 +276,2605 @@ import cc.CaptureSet.IdentityCaptRefMap case _ => mapOver(tp) - def msg = - val found1 = reported(found) - reported.setVariance(-1) - val expected1 = reported(expected) - val (found2, expected2) = - if (found1 frozen_<:< expected1) || reported.fbounded then (found, expected) - else (found1, expected1) - val postScript = addenda.find(!_.isEmpty) match - case Some(p) => p - case None => - if expected.isTopType || found.isBottomType - then "" - else ctx.typer.importSuggestionAddendum(ViewProto(found.widen, expected)) - val (where, printCtx) = Formatting.disambiguateTypes(found2, expected2) - val whereSuffix = if (where.isEmpty) where else s"\n\n$where" - val (foundStr, expectedStr) = Formatting.typeDiff(found2, expected2)(using printCtx) - s"""|Found: $foundStr - |Required: $expectedStr""".stripMargin - + whereSuffix + postScript - - override def explain = - val treeStr = inTree.map(x => s"\nTree: ${x.show}").getOrElse("") - treeStr + "\n" + super.explain - - end TypeMismatch - - class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context) - extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { - //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG - - def msg = { - import core.Flags._ - val maxDist = 3 // maximal number of differences to be considered for a hint - val missing = name.show - - // The symbols of all non-synthetic, non-private members of `site` - // that are of the same type/term kind as the missing member. 
- def candidates: Set[Symbol] = - for - bc <- site.widen.baseClasses.toSet - sym <- bc.info.decls.filter(sym => - sym.isType == name.isTypeName - && !sym.isConstructor - && !sym.flagsUNSAFE.isOneOf(Synthetic | Private)) - yield sym - - // Calculate Levenshtein distance - def distance(s1: String, s2: String): Int = - val dist = Array.ofDim[Int](s2.length + 1, s1.length + 1) - for - j <- 0 to s2.length - i <- 0 to s1.length - do - dist(j)(i) = - if j == 0 then i - else if i == 0 then j - else if s2(j - 1) == s1(i - 1) then dist(j - 1)(i - 1) - else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1 - dist(s2.length)(s1.length) - - // A list of possible candidate symbols with their Levenstein distances - // to the name of the missing member - def closest: List[(Int, Symbol)] = candidates - .toList - .map(sym => (distance(sym.name.show, missing), sym)) - .filter((d, sym) => d <= maxDist && d < missing.length && d < sym.name.show.length) - .sortBy((d, sym) => (d, sym.name.show)) // sort by distance first, alphabetically second - - val enumClause = - if ((name eq nme.values) || (name eq nme.valueOf)) && site.classSymbol.companionClass.isEnumClass then - val kind = if name eq nme.values then i"${nme.values} array" else i"${nme.valueOf} lookup method" - // an assumption is made here that the values and valueOf methods were not generated - // because the enum defines non-singleton cases - i""" - |Although ${site.classSymbol.companionClass} is an enum, it has non-singleton cases, - |meaning a $kind is not defined""" - else - "" - - def prefixEnumClause(addendum: String) = - if enumClause.nonEmpty then s".$enumClause$addendum" else addendum - - val finalAddendum = - if addendum.nonEmpty then prefixEnumClause(addendum) - else closest match - case (d, sym) :: _ => - val siteName = site match - case site: NamedType => site.name.show - case site => i"$site" - val showName = - // Add .type to the name if it is a module - if sym.is(ModuleClass) then 
s"${sym.name.show}.type" - else sym.name.show - s" - did you mean $siteName.$showName?$enumClause" - case Nil => prefixEnumClause("") - - ex"$selected $name is not a member of ${site.widen}$finalAddendum" - } - - def explain = "" - } - - class EarlyDefinitionsNotSupported()(using Context) - extends SyntaxMsg(EarlyDefinitionsNotSupportedID) { - def msg = "Early definitions are not supported; use trait parameters instead" - - def explain = { - val code1 = - """|trait Logging { - | val f: File - | f.open() - | onExit(f.close()) - | def log(msg: String) = f.write(msg) - |} - | - |class B extends Logging { - | val f = new File("log.data") // triggers a NullPointerException - |} - | - |// early definition gets around the NullPointerException - |class C extends { - | val f = new File("log.data") - |} with Logging""".stripMargin - - val code2 = - """|trait Logging(f: File) { - | f.open() - | onExit(f.close()) - | def log(msg: String) = f.write(msg) - |} - | - |class C extends Logging(new File("log.data"))""".stripMargin - - em"""|Earlier versions of Scala did not support trait parameters and "early - |definitions" (also known as "early initializers") were used as an alternative. - | - |Example of old syntax: - | - |$code1 - | - |The above code can now be written as: - | - |$code2 - |""" - } - } - - class TopLevelImplicitClass(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(TopLevelImplicitClassID) { - def msg = em"""An ${hl("implicit class")} may not be top-level""" - - def explain = { - val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef: @unchecked - val exampleArgs = - if(constr0.termParamss.isEmpty) "..." 
- else constr0.termParamss(0).map(_.withMods(untpd.Modifiers()).show).mkString(", ") - def defHasBody[T] = impl.body.exists(!_.isEmpty) - val exampleBody = if (defHasBody) "{\n ...\n }" else "" - em"""|There may not be any method, member or object in scope with the same name as - |the implicit class and a case class automatically gets a companion object with - |the same name created by the compiler which would cause a naming conflict if it - |were allowed. - | | - |To resolve the conflict declare ${cdef.name} inside of an ${hl("object")} then import the class - |from the object at the use site if needed, for example: - | - |object Implicits { - | implicit class ${cdef.name}($exampleArgs)$exampleBody - |} - | - |// At the use site: - |import Implicits.${cdef.name}""" - } - } - - class ImplicitCaseClass(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(ImplicitCaseClassID) { - def msg = em"""A ${hl("case class")} may not be defined as ${hl("implicit")}""" - - def explain = - em"""|Implicit classes may not be case classes. Instead use a plain class: - | - |implicit class ${cdef.name}... 
- | - |""" - } + val found1 = reported(found) + reported.setVariance(-1) + val expected1 = reported(expected) + val (found2, expected2) = + if (found1 frozen_<:< expected1) || reported.fbounded then (found, expected) + else (found1, expected1) + val (foundStr, expectedStr) = Formatting.typeDiff(found2, expected2) + i"""|Found: $foundStr + |Required: $expectedStr""" + end msg + + override def msgPostscript(using Context) = + def importSuggestions = + if expected.isTopType || found.isBottomType then "" + else ctx.typer.importSuggestionAddendum(ViewProto(found.widen, expected)) + super.msgPostscript + ++ addenda.dropWhile(_.isEmpty).headOption.getOrElse(importSuggestions) + + override def explain(using Context) = + val treeStr = inTree.map(x => s"\nTree: ${x.show}").getOrElse("") + treeStr + "\n" + super.explain + +end TypeMismatch + +class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context) +extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { + //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG + + def msg(using Context) = { + import core.Flags._ + val maxDist = 3 // maximal number of differences to be considered for a hint + val missing = name.show + + // The symbols of all non-synthetic, non-private members of `site` + // that are of the same type/term kind as the missing member. 
+ def candidates: Set[Symbol] = + for + bc <- site.widen.baseClasses.toSet + sym <- bc.info.decls.filter(sym => + sym.isType == name.isTypeName + && !sym.isConstructor + && !sym.flagsUNSAFE.isOneOf(Synthetic | Private)) + yield sym + + // Calculate Levenshtein distance + def distance(s1: String, s2: String): Int = + val dist = Array.ofDim[Int](s2.length + 1, s1.length + 1) + for + j <- 0 to s2.length + i <- 0 to s1.length + do + dist(j)(i) = + if j == 0 then i + else if i == 0 then j + else if s2(j - 1) == s1(i - 1) then dist(j - 1)(i - 1) + else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1 + dist(s2.length)(s1.length) + + // A list of possible candidate symbols with their Levenstein distances + // to the name of the missing member + def closest: List[(Int, Symbol)] = candidates + .toList + .map(sym => (distance(sym.name.show, missing), sym)) + .filter((d, sym) => d <= maxDist && d < missing.length && d < sym.name.show.length) + .sortBy((d, sym) => (d, sym.name.show)) // sort by distance first, alphabetically second + + val enumClause = + if ((name eq nme.values) || (name eq nme.valueOf)) && site.classSymbol.companionClass.isEnumClass then + val kind = if name eq nme.values then i"${nme.values} array" else i"${nme.valueOf} lookup method" + // an assumption is made here that the values and valueOf methods were not generated + // because the enum defines non-singleton cases + i""" + |Although ${site.classSymbol.companionClass} is an enum, it has non-singleton cases, + |meaning a $kind is not defined""" + else + "" - class ImplicitClassPrimaryConstructorArity()(using Context) - extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID){ - def msg = "Implicit classes must accept exactly one primary constructor parameter" - def explain = { - val example = "implicit class RichDate(date: java.util.Date)" - em"""Implicit classes may only take one non-implicit argument in their constructor. 
For example: + def prefixEnumClause(addendum: String) = + if enumClause.nonEmpty then s".$enumClause$addendum" else addendum + + val finalAddendum = + if addendum.nonEmpty then prefixEnumClause(addendum) + else closest match + case (d, sym) :: _ => + val siteName = site match + case site: NamedType => site.name.show + case site => i"$site" + val showName = + // Add .type to the name if it is a module + if sym.is(ModuleClass) then s"${sym.name.show}.type" + else sym.name.show + s" - did you mean $siteName.$showName?$enumClause" + case Nil => prefixEnumClause("") + + i"$selected $name is not a member of ${site.widen}$finalAddendum" + } + + def explain(using Context) = "" +} + +class EarlyDefinitionsNotSupported()(using Context) +extends SyntaxMsg(EarlyDefinitionsNotSupportedID) { + def msg(using Context) = "Early definitions are not supported; use trait parameters instead" + + def explain(using Context) = { + val code1 = + """|trait Logging { + | val f: File + | f.open() + | onExit(f.close()) + | def log(msg: String) = f.write(msg) + |} | - | $example + |class B extends Logging { + | val f = new File("log.data") // triggers a NullPointerException + |} | - |While it’s possible to create an implicit class with more than one non-implicit argument, - |such classes aren’t used during implicit lookup. 
- |""" - } - } - - class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(using Context) - extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) { - def msg = em"""${hl("object")}s must not have a self ${hl("type")}""" - - def explain = { - val untpd.ModuleDef(name, tmpl) = mdef - val ValDef(_, selfTpt, _) = tmpl.self - em"""|${hl("object")}s must not have a self ${hl("type")}: - | - |Consider these alternative solutions: - | - Create a trait or a class instead of an object - | - Let the object extend a trait containing the self type: - | - | object $name extends ${selfTpt.show}""" - } - } - - class RepeatedModifier(modifier: String)(implicit ctx:Context) - extends SyntaxMsg(RepeatedModifierID) { - def msg = em"""Repeated modifier $modifier""" - - def explain = { - val code1 = em"""private private val Origin = Point(0, 0)""" - val code2 = em"""private final val Origin = Point(0, 0)""" - em"""This happens when you accidentally specify the same modifier twice. - | - |Example: - | - |$code1 - | - |instead of - | - |$code2 - | - |""" - } - } - - class InterpolatedStringError()(implicit ctx:Context) - extends SyntaxMsg(InterpolatedStringErrorID) { - def msg = "Error in interpolated string: identifier or block expected" - def explain = { - val code1 = "s\"$new Point(0, 0)\"" - val code2 = "s\"${new Point(0, 0)}\"" - em"""|This usually happens when you forget to place your expressions inside curly braces. - | - |$code1 - | - |should be written as - | - |$code2 - |""" - } - } - - class UnboundPlaceholderParameter()(implicit ctx:Context) - extends SyntaxMsg(UnboundPlaceholderParameterID) { - def msg = em"""Unbound placeholder parameter; incorrect use of ${hl("_")}""" - def explain = - em"""|The ${hl("_")} placeholder syntax was used where it could not be bound. - |Consider explicitly writing the variable binding. - | - |This can be done by replacing ${hl("_")} with a variable (eg. ${hl("x")}) - |and adding ${hl("x =>")} where applicable. 
- | - |Example before: - | - |${hl("{ _ }")} - | - |Example after: - | - |${hl("x => { x }")} - | - |Another common occurrence for this error is defining a val with ${hl("_")}: - | - |${hl("val a = _")} - | - |But this val definition isn't very useful, it can never be assigned - |another value. And thus will always remain uninitialized. - |Consider replacing the ${hl("val")} with ${hl("var")}: - | - |${hl("var a = _")} - | - |Note that this use of ${hl("_")} is not placeholder syntax, - |but an uninitialized var definition. - |Only fields can be left uninitialized in this manner; local variables - |must be initialized. - | - |Another occurrence for this error is self type definition. - |The ${hl("_")} can be replaced with ${hl("this")}. - | - |Example before: - | - |${hl("trait A { _: B => ... ")} - | - |Example after: - | - |${hl("trait A { this: B => ... ")} - |""" - } - - class IllegalStartSimpleExpr(illegalToken: String)(using Context) - extends SyntaxMsg(IllegalStartSimpleExprID) { - def msg = em"expression expected but ${Red(illegalToken)} found" - def explain = { - em"""|An expression cannot start with ${Red(illegalToken)}.""" - } - } + |// early definition gets around the NullPointerException + |class C extends { + | val f = new File("log.data") + |} with Logging""".stripMargin + + val code2 = + """|trait Logging(f: File) { + | f.open() + | onExit(f.close()) + | def log(msg: String) = f.write(msg) + |} + | + |class C extends Logging(new File("log.data"))""".stripMargin - class MissingReturnType()(implicit ctx:Context) - extends SyntaxMsg(MissingReturnTypeID) { - def msg = "Missing return type" - def explain = - em"""|An abstract declaration must have a return type. For example: - | - |trait Shape: - | ${hl("def area: Double")} // abstract declaration returning a Double""" + i"""|Earlier versions of Scala did not support trait parameters and "early + |definitions" (also known as "early initializers") were used as an alternative. 
+ | + |Example of old syntax: + | + |$code1 + | + |The above code can now be written as: + | + |$code2 + |""" } - - class MissingReturnTypeWithReturnStatement(method: Symbol)(using Context) - extends SyntaxMsg(MissingReturnTypeWithReturnStatementID) { - def msg = em"$method has a return statement; it needs a result type" - def explain = - em"""|If a method contains a ${hl("return")} statement, it must have an - |explicit return type. For example: - | - |${hl("def good: Int /* explicit return type */ = return 1")}""" +} + +class TopLevelImplicitClass(cdef: untpd.TypeDef)(using Context) +extends SyntaxMsg(TopLevelImplicitClassID) { + def msg(using Context) = i"""An ${hl("implicit class")} may not be top-level""" + + def explain(using Context) = { + val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef: @unchecked + val exampleArgs = + if(constr0.termParamss.isEmpty) "..." + else constr0.termParamss(0).map(_.withMods(untpd.Modifiers()).show).mkString(", ") + def defHasBody[T] = impl.body.exists(!_.isEmpty) + val exampleBody = if (defHasBody) "{\n ...\n }" else "" + i"""|There may not be any method, member or object in scope with the same name as + |the implicit class and a case class automatically gets a companion object with + |the same name created by the compiler which would cause a naming conflict if it + |were allowed. 
+ | | + |To resolve the conflict declare ${cdef.name} inside of an ${hl("object")} then import the class + |from the object at the use site if needed, for example: + | + |object Implicits { + | implicit class ${cdef.name}($exampleArgs)$exampleBody + |} + | + |// At the use site: + |import Implicits.${cdef.name}""" } +} - class YieldOrDoExpectedInForComprehension()(using Context) - extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID) { - def msg = em"${hl("yield")} or ${hl("do")} expected" - - def explain = - em"""|When the enumerators in a for comprehension are not placed in parentheses or - |braces, a ${hl("do")} or ${hl("yield")} statement is required after the enumerators - |section of the comprehension. - | - |You can save some keystrokes by omitting the parentheses and writing - | - |${hl("val numbers = for i <- 1 to 3 yield i")} - | - | instead of - | - |${hl("val numbers = for (i <- 1 to 3) yield i")} - | - |but the ${hl("yield")} keyword is still required. - | - |For comprehensions that simply perform a side effect without yielding anything - |can also be written without parentheses but a ${hl("do")} keyword has to be - |included. 
For example, - | - |${hl("for (i <- 1 to 3) println(i)")} - | - |can be written as - | - |${hl("for i <- 1 to 3 do println(i) // notice the 'do' keyword")} - | - |""" - } +class ImplicitCaseClass(cdef: untpd.TypeDef)(using Context) +extends SyntaxMsg(ImplicitCaseClassID) { + def msg(using Context) = i"""A ${hl("case class")} may not be defined as ${hl("implicit")}""" - class ProperDefinitionNotFound()(using Context) - extends Message(ProperDefinitionNotFoundID) { - def kind = MessageKind.DocComment - def msg = em"""Proper definition was not found in ${hl("@usecase")}""" - - def explain = { - val noUsecase = - "def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That" - - val usecase = - """|/** Map from List[A] => List[B] - | * - | * @usecase def map[B](f: A => B): List[B] - | */ - |def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That - |""".stripMargin - - em"""|Usecases are only supported for ${hl("def")}s. They exist because with Scala's - |advanced type-system, we sometimes end up with seemingly scary signatures. - |The usage of these methods, however, needs not be - for instance the ${hl("map")} - |function - | - |${hl("List(1, 2, 3).map(2 * _) // res: List(2, 4, 6)")} - | - |is easy to understand and use - but has a rather bulky signature: - | - |$noUsecase - | - |to mitigate this and ease the usage of such functions we have the ${hl("@usecase")} - |annotation for docstrings. Which can be used like this: - | - |$usecase - | - |When creating the docs, the signature of the method is substituted by the - |usecase and the compiler makes sure that it is valid. Because of this, you're - |only allowed to use ${hl("def")}s when defining usecases.""" - } + def explain(using Context) = + i"""|Implicit classes may not be case classes. Instead use a plain class: + | + |implicit class ${cdef.name}... 
+ | + |""" +} + +class ImplicitClassPrimaryConstructorArity()(using Context) +extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID){ + def msg(using Context) = "Implicit classes must accept exactly one primary constructor parameter" + def explain(using Context) = { + val example = "implicit class RichDate(date: java.util.Date)" + i"""Implicit classes may only take one non-implicit argument in their constructor. For example: + | + | $example + | + |While it’s possible to create an implicit class with more than one non-implicit argument, + |such classes aren’t used during implicit lookup. + |""" } +} - class ByNameParameterNotSupported(tpe: untpd.Tree)(using Context) - extends SyntaxMsg(ByNameParameterNotSupportedID) { - def msg = em"By-name parameter type ${tpe} not allowed here." - - def explain = - em"""|By-name parameters act like functions that are only evaluated when referenced, - |allowing for lazy evaluation of a parameter. - | - |An example of using a by-name parameter would look like: - |${hl("def func(f: => Boolean) = f // 'f' is evaluated when referenced within the function")} - | - |An example of the syntax of passing an actual function as a parameter: - |${hl("def func(f: (Boolean => Boolean)) = f(true)")} - | - |or: - | - |${hl("def func(f: Boolean => Boolean) = f(true)")} - | - |And the usage could be as such: - |${hl("func(bool => // do something...)")} - |""" - } +class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(using Context) +extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) { + def msg(using Context) = i"""${hl("object")}s must not have a self ${hl("type")}""" - class WrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[ParamInfo], actual: List[untpd.Tree])(using Context) - extends SyntaxMsg(WrongNumberOfTypeArgsID) { - - private val expectedCount = expectedArgs.length - private val actualCount = actual.length - private val msgPrefix = if (actualCount > expectedCount) "Too many" else "Not enough" - - def msg = - val expectedArgString = 
expectedArgs - .map(_.paramName.unexpandedName.show) - .mkString("[", ", ", "]") - val actualArgString = actual.map(_.show).mkString("[", ", ", "]") - val prettyName = - try fntpe.termSymbol match - case NoSymbol => fntpe.show - case symbol => symbol.showFullName - catch case NonFatal(ex) => fntpe.show - em"""|$msgPrefix type arguments for $prettyName$expectedArgString - |expected: $expectedArgString - |actual: $actualArgString""".stripMargin - - def explain = { - val tooManyTypeParams = - """|val tuple2: (Int, String) = (1, "one") - |val list: List[(Int, String)] = List(tuple2)""".stripMargin - - if (actualCount > expectedCount) - em"""|You have supplied too many type parameters - | - |For example List takes a single type parameter (List[A]) - |If you need to hold more types in a list then you need to combine them - |into another data type that can contain the number of types you need, - |In this example one solution would be to use a Tuple: - | - |${tooManyTypeParams}""" - else - em"""|You have not supplied enough type parameters - |If you specify one type parameter then you need to specify every type parameter.""" - } + def explain(using Context) = { + val untpd.ModuleDef(name, tmpl) = mdef + val ValDef(_, selfTpt, _) = tmpl.self + i"""|${hl("object")}s must not have a self ${hl("type")}: + | + |Consider these alternative solutions: + | - Create a trait or a class instead of an object + | - Let the object extend a trait containing the self type: + | + | object $name extends ${selfTpt.show}""" } +} - class IllegalVariableInPatternAlternative(name: Name)(using Context) - extends SyntaxMsg(IllegalVariableInPatternAlternativeID) { - def msg = em"Illegal variable $name in pattern alternative" - def explain = { - val varInAlternative = - """|def g(pair: (Int,Int)): Int = pair match { - | case (1, n) | (n, 1) => n - | case _ => 0 - |}""".stripMargin - - val fixedVarInAlternative = - """|def g(pair: (Int,Int)): Int = pair match { - | case (1, n) => n - | case (n, 1) => 
n - | case _ => 0 - |}""".stripMargin - - em"""|Variables are not allowed within alternate pattern matches. You can workaround - |this issue by adding additional cases for each alternative. For example, the - |illegal function: - | - |$varInAlternative - |could be implemented by moving each alternative into a separate case: - | - |$fixedVarInAlternative""" - } - } +class RepeatedModifier(modifier: String)(implicit ctx:Context) +extends SyntaxMsg(RepeatedModifierID) { + def msg(using Context) = i"""Repeated modifier $modifier""" - class IdentifierExpected(identifier: String)(using Context) - extends SyntaxMsg(IdentifierExpectedID) { - def msg = "identifier expected" - def explain = { - val wrongIdentifier = em"def foo: $identifier = {...}" - val validIdentifier = em"def foo = {...}" - em"""|An identifier expected, but $identifier found. This could be because - |$identifier is not a valid identifier. As a workaround, the compiler could - |infer the type for you. For example, instead of: - | - |$wrongIdentifier - | - |Write your code like: - | - |$validIdentifier - | - |""" - } + def explain(using Context) = { + val code1 = "private private val Origin = Point(0, 0)" + val code2 = "private final val Origin = Point(0, 0)" + i"""This happens when you accidentally specify the same modifier twice. + | + |Example: + | + |$code1 + | + |instead of + | + |$code2 + | + |""" } +} - class AuxConstructorNeedsNonImplicitParameter()(implicit ctx:Context) - extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID) { - def msg = "Auxiliary constructor needs non-implicit parameter list" - def explain = - em"""|Only the primary constructor is allowed an ${hl("implicit")} parameter list; - |auxiliary constructors need non-implicit parameter lists. When a primary - |constructor has an implicit argslist, auxiliary constructors that call the - |primary constructor must specify the implicit value. 
- | - |To resolve this issue check for: - | - Forgotten parenthesis on ${hl("this")} (${hl("def this() = { ... }")}) - | - Auxiliary constructors specify the implicit value - |""" +class InterpolatedStringError()(implicit ctx:Context) +extends SyntaxMsg(InterpolatedStringErrorID) { + def msg(using Context) = "Error in interpolated string: identifier or block expected" + def explain(using Context) = { + val code1 = "s\"$new Point(0, 0)\"" + val code2 = "s\"${new Point(0, 0)}\"" + i"""|This usually happens when you forget to place your expressions inside curly braces. + | + |$code1 + | + |should be written as + | + |$code2 + |""" } +} - class IllegalLiteral()(using Context) - extends SyntaxMsg(IllegalLiteralID) { - def msg = "Illegal literal" - def explain = - em"""|Available literals can be divided into several groups: - | - Integer literals: 0, 21, 0xFFFFFFFF, -42L - | - Floating Point Literals: 0.0, 1e30f, 3.14159f, 1.0e-100, .1 - | - Boolean Literals: true, false - | - Character Literals: 'a', '\u0041', '\n' - | - String Literals: "Hello, World!" - | - null - |""" - } +class UnboundPlaceholderParameter()(implicit ctx:Context) +extends SyntaxMsg(UnboundPlaceholderParameterID) { + def msg(using Context) = i"""Unbound placeholder parameter; incorrect use of ${hl("_")}""" + def explain(using Context) = + i"""|The ${hl("_")} placeholder syntax was used where it could not be bound. + |Consider explicitly writing the variable binding. + | + |This can be done by replacing ${hl("_")} with a variable (eg. ${hl("x")}) + |and adding ${hl("x =>")} where applicable. + | + |Example before: + | + |${hl("{ _ }")} + | + |Example after: + | + |${hl("x => { x }")} + | + |Another common occurrence for this error is defining a val with ${hl("_")}: + | + |${hl("val a = _")} + | + |But this val definition isn't very useful, it can never be assigned + |another value. And thus will always remain uninitialized. 
+ |Consider replacing the ${hl("val")} with ${hl("var")}: + | + |${hl("var a = _")} + | + |Note that this use of ${hl("_")} is not placeholder syntax, + |but an uninitialized var definition. + |Only fields can be left uninitialized in this manner; local variables + |must be initialized. + | + |Another occurrence for this error is self type definition. + |The ${hl("_")} can be replaced with ${hl("this")}. + | + |Example before: + | + |${hl("trait A { _: B => ... ")} + | + |Example after: + | + |${hl("trait A { this: B => ... ")} + |""" +} - class LossyWideningConstantConversion(sourceType: Type, targetType: Type)(using Context) - extends Message(LossyWideningConstantConversionID): - def kind = MessageKind.LossyConversion - def msg = em"""|Widening conversion from $sourceType to $targetType loses precision. - |Write `.to$targetType` instead.""".stripMargin - def explain = "" - - class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context) - extends Message(PatternMatchExhaustivityID) { - def kind = MessageKind.PatternMatchExhaustivity - lazy val uncovered = uncoveredFn - def msg = - val addendum = if hasMore then "(More unmatched cases are elided)" else "" - em"""|${hl("match")} may not be exhaustive. 
- | - |It would fail on pattern case: $uncovered - |$addendum""" - - - def explain = - em"""|There are several ways to make the match exhaustive: - | - Add missing cases as shown in the warning - | - If an extractor always return ${hl("Some(...)")}, write ${hl("Some[X]")} for its return type - | - Add a ${hl("case _ => ...")} at the end to match all remaining cases - |""" +class IllegalStartSimpleExpr(illegalToken: String)(using Context) +extends SyntaxMsg(IllegalStartSimpleExprID) { + def msg(using Context) = i"expression expected but ${Red(illegalToken)} found" + def explain(using Context) = { + i"""|An expression cannot start with ${Red(illegalToken)}.""" } +} - class UncheckedTypePattern(msgFn: => String)(using Context) - extends PatternMatchMsg(UncheckedTypePatternID) { - def msg = msgFn - def explain = - em"""|Type arguments and type refinements are erased during compile time, thus it's - |impossible to check them at run-time. - | - |You can either replace the type arguments by ${hl("_")} or use `@unchecked`. - |""" - } +class MissingReturnType()(implicit ctx:Context) +extends SyntaxMsg(MissingReturnTypeID) { + def msg(using Context) = "Missing return type" + def explain(using Context) = + i"""|An abstract declaration must have a return type. For example: + | + |trait Shape: + | ${hl("def area: Double")} // abstract declaration returning a Double""" +} + +class MissingReturnTypeWithReturnStatement(method: Symbol)(using Context) +extends SyntaxMsg(MissingReturnTypeWithReturnStatementID) { + def msg(using Context) = i"$method has a return statement; it needs a result type" + def explain(using Context) = + i"""|If a method contains a ${hl("return")} statement, it must have an + |explicit return type. 
For example: + | + |${hl("def good: Int /* explicit return type */ = return 1")}""" +} - class MatchCaseUnreachable()(using Context) - extends Message(MatchCaseUnreachableID) { - def kind = MessageKind.MatchCaseUnreachable - def msg = "Unreachable case" - def explain = "" - } +class YieldOrDoExpectedInForComprehension()(using Context) +extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID) { + def msg(using Context) = i"${hl("yield")} or ${hl("do")} expected" - class MatchCaseOnlyNullWarning()(using Context) - extends PatternMatchMsg(MatchCaseOnlyNullWarningID) { - def msg = em"""Unreachable case except for ${hl("null")} (if this is intentional, consider writing ${hl("case null =>")} instead).""" - def explain = "" - } + def explain(using Context) = + i"""|When the enumerators in a for comprehension are not placed in parentheses or + |braces, a ${hl("do")} or ${hl("yield")} statement is required after the enumerators + |section of the comprehension. + | + |You can save some keystrokes by omitting the parentheses and writing + | + |${hl("val numbers = for i <- 1 to 3 yield i")} + | + | instead of + | + |${hl("val numbers = for (i <- 1 to 3) yield i")} + | + |but the ${hl("yield")} keyword is still required. + | + |For comprehensions that simply perform a side effect without yielding anything + |can also be written without parentheses but a ${hl("do")} keyword has to be + |included. 
For example, + | + |${hl("for (i <- 1 to 3) println(i)")} + | + |can be written as + | + |${hl("for i <- 1 to 3 do println(i) // notice the 'do' keyword")} + | + |""" +} + +class ProperDefinitionNotFound()(using Context) +extends Message(ProperDefinitionNotFoundID) { + def kind = MessageKind.DocComment + def msg(using Context) = i"""Proper definition was not found in ${hl("@usecase")}""" + + def explain(using Context) = { + val noUsecase = + "def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That" + + val usecase = + """|/** Map from List[A] => List[B] + | * + | * @usecase def map[B](f: A => B): List[B] + | */ + |def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That + |""".stripMargin - class MatchableWarning(tp: Type, pattern: Boolean)(using Context) - extends TypeMsg(MatchableWarningID) { - def msg = - val kind = if pattern then "pattern selector" else "value" - em"""${kind} should be an instance of Matchable,, - |but it has unmatchable type $tp instead""" - - def explain = - if pattern then - em"""A value of type $tp cannot be the selector of a match expression - |since it is not constrained to be `Matchable`. Matching on unconstrained - |values is disallowed since it can uncover implementation details that - |were intended to be hidden and thereby can violate paramtetricity laws - |for reasoning about programs. - | - |The restriction can be overridden by appending `.asMatchable` to - |the selector value. `asMatchable` needs to be imported from - |scala.compiletime. Example: - | - | import compiletime.asMatchable - | def f[X](x: X) = x.asMatchable match { ... }""" - else - em"""The value can be converted to a `Matchable` by appending `.asMatchable`. - |`asMatchable` needs to be imported from scala.compiletime.""" + i"""|Usecases are only supported for ${hl("def")}s. They exist because with Scala's + |advanced type-system, we sometimes end up with seemingly scary signatures. 
+ |The usage of these methods, however, needs not be - for instance the ${hl("map")} + |function + | + |${hl("List(1, 2, 3).map(2 * _) // res: List(2, 4, 6)")} + | + |is easy to understand and use - but has a rather bulky signature: + | + |$noUsecase + | + |to mitigate this and ease the usage of such functions we have the ${hl("@usecase")} + |annotation for docstrings. Which can be used like this: + | + |$usecase + | + |When creating the docs, the signature of the method is substituted by the + |usecase and the compiler makes sure that it is valid. Because of this, you're + |only allowed to use ${hl("def")}s when defining usecases.""" } +} - class SeqWildcardPatternPos()(using Context) - extends SyntaxMsg(SeqWildcardPatternPosID) { - def msg = em"""${hl("*")} can be used only for last argument""" - def explain = { - val code = - """def sumOfTheFirstTwo(list: List[Int]): Int = list match { - | case List(first, second, x*) => first + second - | case _ => 0 - |}""" - em"""|Sequence wildcard pattern is expected at the end of an argument list. - |This pattern matches any remaining elements in a sequence. - |Consider the following example: - | - |$code - | - |Calling: - | - |${hl("sumOfTheFirstTwo(List(1, 2, 10))")} - | - |would give 3 as a result""" - } - } +class ByNameParameterNotSupported(tpe: untpd.Tree)(using Context) +extends SyntaxMsg(ByNameParameterNotSupportedID) { + def msg(using Context) = i"By-name parameter type ${tpe} not allowed here." - class IllegalStartOfSimplePattern()(using Context) - extends SyntaxMsg(IllegalStartOfSimplePatternID) { - def msg = "pattern expected" - def explain = { - val sipCode = - """def f(x: Int, y: Int) = x match { - | case `y` => ... - |} - """ - val constructorPatternsCode = - """case class Person(name: String, age: Int) + def explain(using Context) = + i"""|By-name parameters act like functions that are only evaluated when referenced, + |allowing for lazy evaluation of a parameter. 
+ | + |An example of using a by-name parameter would look like: + |${hl("def func(f: => Boolean) = f // 'f' is evaluated when referenced within the function")} + | + |An example of the syntax of passing an actual function as a parameter: + |${hl("def func(f: (Boolean => Boolean)) = f(true)")} + | + |or: + | + |${hl("def func(f: Boolean => Boolean) = f(true)")} + | + |And the usage could be as such: + |${hl("func(bool => // do something...)")} + |""" +} + +class WrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[ParamInfo], actual: List[untpd.Tree])(using Context) +extends SyntaxMsg(WrongNumberOfTypeArgsID) { + + private val expectedCount = expectedArgs.length + private val actualCount = actual.length + private val msgPrefix = if (actualCount > expectedCount) "Too many" else "Not enough" + + def msg(using Context) = + val expectedArgString = expectedArgs + .map(_.paramName.unexpandedName.show) + .mkString("[", ", ", "]") + val actualArgString = actual.map(_.show).mkString("[", ", ", "]") + val prettyName = + try fntpe.termSymbol match + case NoSymbol => fntpe.show + case symbol => symbol.showFullName + catch case NonFatal(ex) => fntpe.show + i"""|$msgPrefix type arguments for $prettyName$expectedArgString + |expected: $expectedArgString + |actual: $actualArgString""" + + def explain(using Context) = { + val tooManyTypeParams = + """|val tuple2: (Int, String) = (1, "one") + |val list: List[(Int, String)] = List(tuple2)""".stripMargin + + if (actualCount > expectedCount) + i"""|You have supplied too many type parameters | - |def test(p: Person) = p match { - | case Person(name, age) => ... 
- |} - """ - val tupplePatternsCode = - """def swap(tuple: (String, Int)): (Int, String) = tuple match { - | case (text, number) => (number, text) - |} - """ - val patternSequencesCode = - """def getSecondValue(list: List[Int]): Int = list match { - | case List(_, second, x:_*) => second + |For example List takes a single type parameter (List[A]) + |If you need to hold more types in a list then you need to combine them + |into another data type that can contain the number of types you need, + |In this example one solution would be to use a Tuple: + | + |${tooManyTypeParams}""" + else + i"""|You have not supplied enough type parameters + |If you specify one type parameter then you need to specify every type parameter.""" + } +} + +class IllegalVariableInPatternAlternative(name: Name)(using Context) +extends SyntaxMsg(IllegalVariableInPatternAlternativeID) { + def msg(using Context) = i"Illegal variable $name in pattern alternative" + def explain(using Context) = { + val varInAlternative = + """|def g(pair: (Int,Int)): Int = pair match { + | case (1, n) | (n, 1) => n | case _ => 0 - |}""" - em"""|Simple patterns can be divided into several groups: - |- Variable Patterns: ${hl("case x => ...")}. - | It matches any value, and binds the variable name to that value. - | A special case is the wild-card pattern _ which is treated as if it was a fresh - | variable on each occurrence. - | - |- Typed Patterns: ${hl("case x: Int => ...")} or ${hl("case _: Int => ...")}. - | This pattern matches any value matched by the specified type; it binds the variable - | name to that value. - | - |- Literal Patterns: ${hl("case 123 => ...")} or ${hl("case 'A' => ...")}. - | This type of pattern matches any value that is equal to the specified literal. - | - |- Stable Identifier Patterns: - | - | $sipCode - | - | the match succeeds only if the x argument and the y argument of f are equal. 
- | - |- Constructor Patterns: - | - | $constructorPatternsCode - | - | The pattern binds all object's fields to the variable names (name and age, in this - | case). - | - |- Tuple Patterns: - | - | $tupplePatternsCode - | - | Calling: - | - | ${hl("""swap(("Luftballons", 99)""")} - | - | would give ${hl("""(99, "Luftballons")""")} as a result. - | - |- Pattern Sequences: - | - | $patternSequencesCode - | - | Calling: - | - | ${hl("getSecondValue(List(1, 10, 2))")} - | - | would give 10 as a result. - | This pattern is possible because a companion object for the List class has a method - | with the following signature: - | - | ${hl("def unapplySeq[A](x: List[A]): Some[List[A]]")} - |""" - } - } - - class PkgDuplicateSymbol(existing: Symbol)(using Context) - extends NamingMsg(PkgDuplicateSymbolID) { - def msg = em"Trying to define package with same name as $existing" - def explain = "" - } + |}""".stripMargin - class ExistentialTypesNoLongerSupported()(using Context) - extends SyntaxMsg(ExistentialTypesNoLongerSupportedID) { - def msg = - em"""|Existential types are no longer supported - - |use a wildcard or dependent type instead""" - def explain = - em"""|The use of existential types is no longer supported. - | - |You should use a wildcard or dependent type instead. - | - |For example: - | - |Instead of using ${hl("forSome")} to specify a type variable - | - |${hl("List[T forSome { type T }]")} - | - |Try using a wildcard type variable - | - |${hl("List[?]")} - |""" - } + val fixedVarInAlternative = + """|def g(pair: (Int,Int)): Int = pair match { + | case (1, n) => n + | case (n, 1) => n + | case _ => 0 + |}""".stripMargin - class UnboundWildcardType()(using Context) - extends SyntaxMsg(UnboundWildcardTypeID) { - def msg = "Unbound wildcard type" - def explain = - em"""|The wildcard type syntax (${hl("_")}) was used where it could not be bound. - |Replace ${hl("_")} with a non-wildcard type. 
If the type doesn't matter, - |try replacing ${hl("_")} with ${hl("Any")}. - | - |Examples: - | - |- Parameter lists - | - | Instead of: - | ${hl("def foo(x: _) = ...")} - | - | Use ${hl("Any")} if the type doesn't matter: - | ${hl("def foo(x: Any) = ...")} - | - |- Type arguments - | - | Instead of: - | ${hl("val foo = List[?](1, 2)")} - | - | Use: - | ${hl("val foo = List[Int](1, 2)")} - | - |- Type bounds - | - | Instead of: - | ${hl("def foo[T <: _](x: T) = ...")} - | - | Remove the bounds if the type doesn't matter: - | ${hl("def foo[T](x: T) = ...")} - | - |- ${hl("val")} and ${hl("def")} types - | - | Instead of: - | ${hl("val foo: _ = 3")} - | - | Use: - | ${hl("val foo: Int = 3")} - |""" + i"""|Variables are not allowed within alternate pattern matches. You can workaround + |this issue by adding additional cases for each alternative. For example, the + |illegal function: + | + |$varInAlternative + |could be implemented by moving each alternative into a separate case: + | + |$fixedVarInAlternative""" + } +} + +class IdentifierExpected(identifier: String)(using Context) +extends SyntaxMsg(IdentifierExpectedID) { + def msg(using Context) = "identifier expected" + def explain(using Context) = { + val wrongIdentifier = i"def foo: $identifier = {...}" + val validIdentifier = i"def foo = {...}" + i"""|An identifier expected, but $identifier found. This could be because + |$identifier is not a valid identifier. As a workaround, the compiler could + |infer the type for you. For example, instead of: + | + |$wrongIdentifier + | + |Write your code like: + | + |$validIdentifier + | + |""" } +} - class OverridesNothing(member: Symbol)(using Context) - extends DeclarationMsg(OverridesNothingID) { - def msg = em"""${member} overrides nothing""" - - def explain = - em"""|There must be a field or method with the name ${member.name} in a super - |class of ${member.owner} to override it. Did you misspell it? - |Are you extending the right classes? 
- |""" - } +class AuxConstructorNeedsNonImplicitParameter()(implicit ctx:Context) +extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID) { + def msg(using Context) = "Auxiliary constructor needs non-implicit parameter list" + def explain(using Context) = + i"""|Only the primary constructor is allowed an ${hl("implicit")} parameter list; + |auxiliary constructors need non-implicit parameter lists. When a primary + |constructor has an implicit argslist, auxiliary constructors that call the + |primary constructor must specify the implicit value. + | + |To resolve this issue check for: + | - Forgotten parenthesis on ${hl("this")} (${hl("def this() = { ... }")}) + | - Auxiliary constructors specify the implicit value + |""" +} + +class IllegalLiteral()(using Context) +extends SyntaxMsg(IllegalLiteralID) { + def msg(using Context) = "Illegal literal" + def explain(using Context) = + i"""|Available literals can be divided into several groups: + | - Integer literals: 0, 21, 0xFFFFFFFF, -42L + | - Floating Point Literals: 0.0, 1e30f, 3.14159f, 1.0e-100, .1 + | - Boolean Literals: true, false + | - Character Literals: 'a', '\u0041', '\n' + | - String Literals: "Hello, World!" + | - null + |""" +} + +class LossyWideningConstantConversion(sourceType: Type, targetType: Type)(using Context) +extends Message(LossyWideningConstantConversionID): + def kind = MessageKind.LossyConversion + def msg(using Context) = i"""|Widening conversion from $sourceType to $targetType loses precision. + |Write `.to$targetType` instead.""" + def explain(using Context) = "" + +class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context) +extends Message(PatternMatchExhaustivityID) { + def kind = MessageKind.PatternMatchExhaustivity + lazy val uncovered = uncoveredFn + def msg(using Context) = + val addendum = if hasMore then "(More unmatched cases are elided)" else "" + i"""|${hl("match")} may not be exhaustive. 
+ | + |It would fail on pattern case: $uncovered + |$addendum""" - class OverridesNothingButNameExists(member: Symbol, existing: List[Denotations.SingleDenotation])(using Context) - extends DeclarationMsg(OverridesNothingButNameExistsID) { - def msg = - val what = - if !existing.exists(_.symbol.hasTargetName(member.targetName)) - then "target name" - else "signature" - em"""${member} has a different $what than the overridden declaration""" - def explain = - val existingDecl: String = existing.map(_.showDcl).mkString(" \n") - em"""|There must be a non-final field or method with the name ${member.name} and the - |same parameter list in a super class of ${member.owner} to override it. - | - | ${member.showDcl} - | - |The super classes of ${member.owner} contain the following members - |named ${member.name}: - | ${existingDecl} - |""" - } - class OverrideError(override val msg: String) extends DeclarationMsg(OverrideErrorID): - def explain = "" - - class OverrideTypeMismatchError(override val msg: String, memberTp: Type, otherTp: Type)(using Context) - extends DeclarationMsg(OverrideTypeMismatchErrorID): - def explain = err.whyNoMatchStr(memberTp, otherTp) - override def canExplain = true - - class ForwardReferenceExtendsOverDefinition(value: Symbol, definition: Symbol)(using Context) - extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) { - def msg = em"${definition.name} is a forward reference extending over the definition of ${value.name}" - - def explain = - em"""|${definition.name} is used before you define it, and the definition of ${value.name} - |appears between that use and the definition of ${definition.name}. - | - |Forward references are allowed only, if there are no value definitions between - |the reference and the referred method definition. 
- | - |Define ${definition.name} before it is used, - |or move the definition of ${value.name} so it does not appear between - |the declaration of ${definition.name} and its use, - |or define ${value.name} as lazy. - |""".stripMargin + def explain(using Context) = + i"""|There are several ways to make the match exhaustive: + | - Add missing cases as shown in the warning + | - If an extractor always return ${hl("Some(...)")}, write ${hl("Some[X]")} for its return type + | - Add a ${hl("case _ => ...")} at the end to match all remaining cases + |""" +} + +class UncheckedTypePattern(msgFn: => String)(using Context) + extends PatternMatchMsg(UncheckedTypePatternID) { + def msg(using Context) = msgFn + def explain(using Context) = + i"""|Type arguments and type refinements are erased during compile time, thus it's + |impossible to check them at run-time. + | + |You can either replace the type arguments by ${hl("_")} or use `@unchecked`. + |""" +} + +class MatchCaseUnreachable()(using Context) +extends Message(MatchCaseUnreachableID) { + def kind = MessageKind.MatchCaseUnreachable + def msg(using Context) = "Unreachable case" + def explain(using Context) = "" +} + +class MatchCaseOnlyNullWarning()(using Context) +extends PatternMatchMsg(MatchCaseOnlyNullWarningID) { + def msg(using Context) = i"""Unreachable case except for ${hl("null")} (if this is intentional, consider writing ${hl("case null =>")} instead).""" + def explain(using Context) = "" +} + +class MatchableWarning(tp: Type, pattern: Boolean)(using Context) +extends TypeMsg(MatchableWarningID) { + def msg(using Context) = + val kind = if pattern then "pattern selector" else "value" + i"""${kind} should be an instance of Matchable,, + |but it has unmatchable type $tp instead""" + + def explain(using Context) = + if pattern then + i"""A value of type $tp cannot be the selector of a match expression + |since it is not constrained to be `Matchable`. 
Matching on unconstrained + |values is disallowed since it can uncover implementation details that + |were intended to be hidden and thereby can violate paramtetricity laws + |for reasoning about programs. + | + |The restriction can be overridden by appending `.asMatchable` to + |the selector value. `asMatchable` needs to be imported from + |scala.compiletime. Example: + | + | import compiletime.asMatchable + | def f[X](x: X) = x.asMatchable match { ... }""" + else + i"""The value can be converted to a `Matchable` by appending `.asMatchable`. + |`asMatchable` needs to be imported from scala.compiletime.""" +} + +class SeqWildcardPatternPos()(using Context) +extends SyntaxMsg(SeqWildcardPatternPosID) { + def msg(using Context) = i"""${hl("*")} can be used only for last argument""" + def explain(using Context) = { + val code = + """def sumOfTheFirstTwo(list: List[Int]): Int = list match { + | case List(first, second, x*) => first + second + | case _ => 0 + |}""" + i"""|Sequence wildcard pattern is expected at the end of an argument list. + |This pattern matches any remaining elements in a sequence. + |Consider the following example: + | + |$code + | + |Calling: + | + |${hl("sumOfTheFirstTwo(List(1, 2, 10))")} + | + |would give 3 as a result""" + } +} + +class IllegalStartOfSimplePattern()(using Context) +extends SyntaxMsg(IllegalStartOfSimplePatternID) { + def msg(using Context) = "pattern expected" + def explain(using Context) = { + val sipCode = + """def f(x: Int, y: Int) = x match { + | case `y` => ... + |} + """ + val constructorPatternsCode = + """case class Person(name: String, age: Int) + | + |def test(p: Person) = p match { + | case Person(name, age) => ... 
+ |} + """ + val tupplePatternsCode = + """def swap(tuple: (String, Int)): (Int, String) = tuple match { + | case (text, number) => (number, text) + |} + """ + val patternSequencesCode = + """def getSecondValue(list: List[Int]): Int = list match { + | case List(_, second, x:_*) => second + | case _ => 0 + |}""" + i"""|Simple patterns can be divided into several groups: + |- Variable Patterns: ${hl("case x => ...")}. + | It matches any value, and binds the variable name to that value. + | A special case is the wild-card pattern _ which is treated as if it was a fresh + | variable on each occurrence. + | + |- Typed Patterns: ${hl("case x: Int => ...")} or ${hl("case _: Int => ...")}. + | This pattern matches any value matched by the specified type; it binds the variable + | name to that value. + | + |- Literal Patterns: ${hl("case 123 => ...")} or ${hl("case 'A' => ...")}. + | This type of pattern matches any value that is equal to the specified literal. + | + |- Stable Identifier Patterns: + | + | $sipCode + | + | the match succeeds only if the x argument and the y argument of f are equal. + | + |- Constructor Patterns: + | + | $constructorPatternsCode + | + | The pattern binds all object's fields to the variable names (name and age, in this + | case). + | + |- Tuple Patterns: + | + | $tupplePatternsCode + | + | Calling: + | + | ${hl("""swap(("Luftballons", 99)""")} + | + | would give ${hl("""(99, "Luftballons")""")} as a result. + | + |- Pattern Sequences: + | + | $patternSequencesCode + | + | Calling: + | + | ${hl("getSecondValue(List(1, 10, 2))")} + | + | would give 10 as a result. 
+ | This pattern is possible because a companion object for the List class has a method + | with the following signature: + | + | ${hl("def unapplySeq[A](x: List[A]): Some[List[A]]")} + |""" } +} + +class PkgDuplicateSymbol(existing: Symbol)(using Context) +extends NamingMsg(PkgDuplicateSymbolID) { + def msg(using Context) = i"Trying to define package with same name as $existing" + def explain(using Context) = "" +} + +class ExistentialTypesNoLongerSupported()(using Context) +extends SyntaxMsg(ExistentialTypesNoLongerSupportedID) { + def msg(using Context) = + i"""|Existential types are no longer supported - + |use a wildcard or dependent type instead""" + def explain(using Context) = + i"""|The use of existential types is no longer supported. + | + |You should use a wildcard or dependent type instead. + | + |For example: + | + |Instead of using ${hl("forSome")} to specify a type variable + | + |${hl("List[T forSome { type T }]")} + | + |Try using a wildcard type variable + | + |${hl("List[?]")} + |""" +} + +class UnboundWildcardType()(using Context) +extends SyntaxMsg(UnboundWildcardTypeID) { + def msg(using Context) = "Unbound wildcard type" + def explain(using Context) = + i"""|The wildcard type syntax (${hl("_")}) was used where it could not be bound. + |Replace ${hl("_")} with a non-wildcard type. If the type doesn't matter, + |try replacing ${hl("_")} with ${hl("Any")}. 
+ | + |Examples: + | + |- Parameter lists + | + | Instead of: + | ${hl("def foo(x: _) = ...")} + | + | Use ${hl("Any")} if the type doesn't matter: + | ${hl("def foo(x: Any) = ...")} + | + |- Type arguments + | + | Instead of: + | ${hl("val foo = List[?](1, 2)")} + | + | Use: + | ${hl("val foo = List[Int](1, 2)")} + | + |- Type bounds + | + | Instead of: + | ${hl("def foo[T <: _](x: T) = ...")} + | + | Remove the bounds if the type doesn't matter: + | ${hl("def foo[T](x: T) = ...")} + | + |- ${hl("val")} and ${hl("def")} types + | + | Instead of: + | ${hl("val foo: _ = 3")} + | + | Use: + | ${hl("val foo: Int = 3")} + |""" +} - class ExpectedTokenButFound(expected: Token, found: Token)(using Context) - extends SyntaxMsg(ExpectedTokenButFoundID) { +class OverridesNothing(member: Symbol)(using Context) +extends DeclarationMsg(OverridesNothingID) { + def msg(using Context) = i"""${member} overrides nothing""" - private lazy val foundText = Tokens.showToken(found) + def explain(using Context) = + i"""|There must be a field or method with the name ${member.name} in a super + |class of ${member.owner} to override it. Did you misspell it? + |Are you extending the right classes? + |""" +} + +class OverridesNothingButNameExists(member: Symbol, existing: List[Denotations.SingleDenotation])(using Context) +extends DeclarationMsg(OverridesNothingButNameExistsID) { + def msg(using Context) = + val what = + if !existing.exists(_.symbol.hasTargetName(member.targetName)) + then "target name" + else "signature" + i"""${member} has a different $what than the overridden declaration""" + def explain(using Context) = + val existingDecl: String = existing.map(_.showDcl).mkString(" \n") + i"""|There must be a non-final field or method with the name ${member.name} and the + |same parameter list in a super class of ${member.owner} to override it. 
+ | + | ${member.showDcl} + | + |The super classes of ${member.owner} contain the following members + |named ${member.name}: + | ${existingDecl} + |""" +} + +class OverrideError( + core: Context ?=> String, base: Type, + member: Symbol, other: Symbol, + memberTp: Type, otherTp: Type)(using Context) +extends DeclarationMsg(OverrideErrorID), NoDisambiguation: + def msg(using Context) = + val isConcreteOverAbstract = + (other.owner isSubClass member.owner) && other.is(Deferred) && !member.is(Deferred) + def addendum = + if isConcreteOverAbstract then + i"""| + |(Note that ${err.infoStringWithLocation(other, base)} is abstract, + |and is therefore overridden by concrete ${err.infoStringWithLocation(member, base)})""" + else "" + i"""error overriding ${err.infoStringWithLocation(other, base)}; + | ${err.infoString(member, base, showLocation = member.owner != base.typeSymbol)} $core$addendum""" + override def canExplain = + memberTp.exists && otherTp.exists + def explain(using Context) = + if canExplain then err.whyNoMatchStr(memberTp, otherTp) else "" + +class ForwardReferenceExtendsOverDefinition(value: Symbol, definition: Symbol)(using Context) +extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) { + def msg(using Context) = i"${definition.name} is a forward reference extending over the definition of ${value.name}" + + def explain(using Context) = + i"""|${definition.name} is used before you define it, and the definition of ${value.name} + |appears between that use and the definition of ${definition.name}. + | + |Forward references are allowed only, if there are no value definitions between + |the reference and the referred method definition. + | + |Define ${definition.name} before it is used, + |or move the definition of ${value.name} so it does not appear between + |the declaration of ${definition.name} and its use, + |or define ${value.name} as lazy. 
+ |""" +} - def msg = - val expectedText = - if (Tokens.isIdentifier(expected)) "an identifier" - else Tokens.showToken(expected) - em"""${expectedText} expected, but ${foundText} found""" +class ExpectedTokenButFound(expected: Token, found: Token)(using Context) +extends SyntaxMsg(ExpectedTokenButFoundID) { - def explain = - if (Tokens.isIdentifier(expected) && Tokens.isKeyword(found)) - s""" - |If you want to use $foundText as identifier, you may put it in backticks: `${Tokens.tokenString(found)}`.""".stripMargin - else - "" - } + private def foundText = Tokens.showToken(found) - class MixedLeftAndRightAssociativeOps(op1: Name, op2: Name, op2LeftAssoc: Boolean)(using Context) - extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID) { - def msg = - val op1Asso: String = if (op2LeftAssoc) "which is right-associative" else "which is left-associative" - val op2Asso: String = if (op2LeftAssoc) "which is left-associative" else "which is right-associative" - em"${op1} (${op1Asso}) and ${op2} ($op2Asso) have same precedence and may not be mixed" - def explain = - s"""|The operators ${op1} and ${op2} are used as infix operators in the same expression, - |but they bind to different sides: - |${op1} is applied to the operand to its ${if (op2LeftAssoc) "right" else "left"} - |${op2} is applied to the operand to its ${if (op2LeftAssoc) "left" else "right"} - |As both have the same precedence the compiler can't decide which to apply first. - | - |You may use parenthesis to make the application order explicit, - |or use method application syntax operand1.${op1}(operand2). - | - |Operators ending in a colon ${hl(":")} are right-associative. All other operators are left-associative. - | - |Infix operator precedence is determined by the operator's first character. Characters are listed - |below in increasing order of precedence, with characters on the same line having the same precedence. - | (all letters) - | | - | ^ - | & - | = ! 
- | < > - | : - | + - - | * / % - | (all other special characters) - |Operators starting with a letter have lowest precedence, followed by operators starting with `|`, etc. - |""".stripMargin - } + def msg(using Context) = + val expectedText = + if (Tokens.isIdentifier(expected)) "an identifier" + else Tokens.showToken(expected) + i"""${expectedText} expected, but ${foundText} found""" - class CantInstantiateAbstractClassOrTrait(cls: Symbol, isTrait: Boolean)(using Context) - extends TypeMsg(CantInstantiateAbstractClassOrTraitID) { - private val traitOrAbstract = if (isTrait) "a trait" else "abstract" - def msg = em"""${cls.name} is ${traitOrAbstract}; it cannot be instantiated""" - def explain = - em"""|Abstract classes and traits need to be extended by a concrete class or object - |to make their functionality accessible. - | - |You may want to create an anonymous class extending ${cls.name} with - | ${s"class ${cls.name} { }"} - | - |or add a companion object with - | ${s"object ${cls.name} extends ${cls.name}"} - | - |You need to implement any abstract members in both cases. - |""".stripMargin - } - - class UnreducibleApplication(tycon: Type)(using Context) extends TypeMsg(UnreducibleApplicationID): - def msg = em"unreducible application of higher-kinded type $tycon to wildcard arguments" - def explain = - em"""|An abstract type constructor cannot be applied to wildcard arguments. - |Such applications are equivalent to existential types, which are not - |supported in Scala 3.""" - - class OverloadedOrRecursiveMethodNeedsResultType(cycleSym: Symbol)(using Context) - extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID) { - def msg = em"""Overloaded or recursive $cycleSym needs return type""" - def explain = - em"""Case 1: $cycleSym is overloaded - |If there are multiple methods named $cycleSym and at least one definition of - |it calls another, you need to specify the calling method's return type. 
- | - |Case 2: $cycleSym is recursive - |If $cycleSym calls itself on any path (even through mutual recursion), you need to specify the return type - |of $cycleSym or of a definition it's mutually recursive with. - |""".stripMargin - } - - class RecursiveValueNeedsResultType(cycleSym: Symbol)(using Context) - extends CyclicMsg(RecursiveValueNeedsResultTypeID) { - def msg = em"""Recursive $cycleSym needs type""" - def explain = - em"""The definition of $cycleSym is recursive and you need to specify its type. - |""".stripMargin - } - - class CyclicReferenceInvolving(denot: SymDenotation)(using Context) - extends CyclicMsg(CyclicReferenceInvolvingID) { - def msg = - val where = if denot.exists then s" involving $denot" else "" - em"Cyclic reference$where" - def explain = - em"""|$denot is declared as part of a cycle which makes it impossible for the - |compiler to decide upon ${denot.name}'s type. - |To avoid this error, try giving ${denot.name} an explicit type. - |""".stripMargin - } + def explain(using Context) = + if (Tokens.isIdentifier(expected) && Tokens.isKeyword(found)) + s""" + |If you want to use $foundText as identifier, you may put it in backticks: `${Tokens.tokenString(found)}`.""".stripMargin + else + "" +} + +class MixedLeftAndRightAssociativeOps(op1: Name, op2: Name, op2LeftAssoc: Boolean)(using Context) +extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID) { + def msg(using Context) = + val op1Asso: String = if (op2LeftAssoc) "which is right-associative" else "which is left-associative" + val op2Asso: String = if (op2LeftAssoc) "which is left-associative" else "which is right-associative" + i"${op1} (${op1Asso}) and ${op2} ($op2Asso) have same precedence and may not be mixed" + def explain(using Context) = + s"""|The operators ${op1} and ${op2} are used as infix operators in the same expression, + |but they bind to different sides: + |${op1} is applied to the operand to its ${if (op2LeftAssoc) "right" else "left"} + |${op2} is applied to the operand 
to its ${if (op2LeftAssoc) "left" else "right"} + |As both have the same precedence the compiler can't decide which to apply first. + | + |You may use parenthesis to make the application order explicit, + |or use method application syntax operand1.${op1}(operand2). + | + |Operators ending in a colon ${hl(":")} are right-associative. All other operators are left-associative. + | + |Infix operator precedence is determined by the operator's first character. Characters are listed + |below in increasing order of precedence, with characters on the same line having the same precedence. + | (all letters) + | | + | ^ + | & + | = ! + | < > + | : + | + - + | * / % + | (all other special characters) + |Operators starting with a letter have lowest precedence, followed by operators starting with `|`, etc. + |""".stripMargin +} + +class CantInstantiateAbstractClassOrTrait(cls: Symbol, isTrait: Boolean)(using Context) +extends TypeMsg(CantInstantiateAbstractClassOrTraitID) { + private val traitOrAbstract = if (isTrait) "a trait" else "abstract" + def msg(using Context) = i"""${cls.name} is ${traitOrAbstract}; it cannot be instantiated""" + def explain(using Context) = + i"""|Abstract classes and traits need to be extended by a concrete class or object + |to make their functionality accessible. + | + |You may want to create an anonymous class extending ${cls.name} with + | ${s"class ${cls.name} { }"} + | + |or add a companion object with + | ${s"object ${cls.name} extends ${cls.name}"} + | + |You need to implement any abstract members in both cases. + |""" +} + +class UnreducibleApplication(tycon: Type)(using Context) extends TypeMsg(UnreducibleApplicationID): + def msg(using Context) = i"unreducible application of higher-kinded type $tycon to wildcard arguments" + def explain(using Context) = + i"""|An abstract type constructor cannot be applied to wildcard arguments. 
+ |Such applications are equivalent to existential types, which are not + |supported in Scala 3.""" + +class OverloadedOrRecursiveMethodNeedsResultType(cycleSym: Symbol)(using Context) +extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID) { + def msg(using Context) = i"""Overloaded or recursive $cycleSym needs return type""" + def explain(using Context) = + i"""Case 1: $cycleSym is overloaded + |If there are multiple methods named $cycleSym and at least one definition of + |it calls another, you need to specify the calling method's return type. + | + |Case 2: $cycleSym is recursive + |If $cycleSym calls itself on any path (even through mutual recursion), you need to specify the return type + |of $cycleSym or of a definition it's mutually recursive with. + |""" +} - class CyclicReferenceInvolvingImplicit(cycleSym: Symbol)(using Context) - extends CyclicMsg(CyclicReferenceInvolvingImplicitID) { - def msg = em"""Cyclic reference involving implicit $cycleSym""" - def explain = - em"""|$cycleSym is declared as part of a cycle which makes it impossible for the - |compiler to decide upon ${cycleSym.name}'s type. - |This might happen when the right hand-side of $cycleSym's definition involves an implicit search. - |To avoid this error, try giving ${cycleSym.name} an explicit type. - |""".stripMargin - } +class RecursiveValueNeedsResultType(cycleSym: Symbol)(using Context) +extends CyclicMsg(RecursiveValueNeedsResultTypeID) { + def msg(using Context) = i"""Recursive $cycleSym needs type""" + def explain(using Context) = + i"""The definition of $cycleSym is recursive and you need to specify its type. 
+ |""" +} + +class CyclicReferenceInvolving(denot: SymDenotation)(using Context) +extends CyclicMsg(CyclicReferenceInvolvingID) { + def msg(using Context) = + val where = if denot.exists then s" involving $denot" else "" + i"Cyclic reference$where" + def explain(using Context) = + i"""|$denot is declared as part of a cycle which makes it impossible for the + |compiler to decide upon ${denot.name}'s type. + |To avoid this error, try giving ${denot.name} an explicit type. + |""" +} + +class CyclicReferenceInvolvingImplicit(cycleSym: Symbol)(using Context) +extends CyclicMsg(CyclicReferenceInvolvingImplicitID) { + def msg(using Context) = i"""Cyclic reference involving implicit $cycleSym""" + def explain(using Context) = + i"""|$cycleSym is declared as part of a cycle which makes it impossible for the + |compiler to decide upon ${cycleSym.name}'s type. + |This might happen when the right hand-side of $cycleSym's definition involves an implicit search. + |To avoid this error, try giving ${cycleSym.name} an explicit type. + |""" +} - class SkolemInInferred(tree: tpd.Tree, pt: Type, argument: tpd.Tree)(using Context) - extends TypeMsg(SkolemInInferredID): - private def argStr = +class SkolemInInferred(tree: tpd.Tree, pt: Type, argument: tpd.Tree)(using Context) +extends TypeMsg(SkolemInInferredID): + def msg(using Context) = + def argStr = if argument.isEmpty then "" else i" from argument of type ${argument.tpe.widen}" - def msg = - em"""Failure to generate given instance for type $pt$argStr) - | - |I found: $tree - |But the part corresponding to `` is not a reference that can be generated. - |This might be because resolution yielded as given instance a function that is not - |known to be total and side-effect free.""" - def explain = - em"""The part of given resolution that corresponds to `` produced a term that - |is not a stable reference. Therefore a given instance could not be generated. 
- | - |To trouble-shoot the problem, try to supply an explicit expression instead of - |relying on implicit search at this point.""" - - class SuperQualMustBeParent(qual: untpd.Ident, cls: ClassSymbol)(using Context) - extends ReferenceMsg(SuperQualMustBeParentID) { - def msg = em"""|$qual does not name a parent of $cls""" - def explain = - val parents: Seq[String] = (cls.info.parents map (_.typeSymbol.name.show)).sorted - em"""|When a qualifier ${hl("T")} is used in a ${hl("super")} prefix of the form ${hl("C.super[T]")}, - |${hl("T")} must be a parent type of ${hl("C")}. - | - |In this case, the parents of $cls are: - |${parents.mkString(" - ", "\n - ", "")} - |""".stripMargin - } - - class VarArgsParamMustComeLast()(using Context) - extends SyntaxMsg(VarArgsParamMustComeLastID) { - def msg = em"""${hl("varargs")} parameter must come last""" - def explain = - em"""|The ${hl("varargs")} field must be the last field in the method signature. - |Attempting to define a field in a method signature after a ${hl("varargs")} field is an error. 
- |""" - } - - import typer.Typer.BindingPrec - - class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context) - extends ReferenceMsg(AmbiguousReferenceID) { - - /** A string which explains how something was bound; Depending on `prec` this is either - * imported by - * or defined in - */ - private def bindingString(prec: BindingPrec, whereFound: Context, qualifier: String = "") = { - val howVisible = prec match { - case BindingPrec.Definition => "defined" - case BindingPrec.Inheritance => "inherited" - case BindingPrec.NamedImport => "imported by name" - case BindingPrec.WildImport => "imported" - case BindingPrec.PackageClause => "found" - case BindingPrec.NothingBound => assert(false) - } - if (prec.isImportPrec) { - ex"""$howVisible$qualifier by ${em"${whereFound.importInfo}"}""" - } else - ex"""$howVisible$qualifier in ${em"${whereFound.owner}"}""" - } - - def msg = - i"""|Reference to ${em"$name"} is ambiguous, - |it is both ${bindingString(newPrec, ctx)} - |and ${bindingString(prevPrec, prevCtx, " subsequently")}""" - - def explain = - em"""|The compiler can't decide which of the possible choices you - |are referencing with $name: A definition of lower precedence - |in an inner scope, or a definition with higher precedence in - |an outer scope. 
- |Note: - | - Definitions in an enclosing scope take precedence over inherited definitions - | - Definitions take precedence over imports - | - Named imports take precedence over wildcard imports - | - You may replace a name when imported using - | ${hl("import")} scala.{ $name => ${name.show + "Tick"} } - |""" - } - - class MethodDoesNotTakeParameters(tree: tpd.Tree)(using Context) - extends TypeMsg(MethodDoesNotTakeParametersId) { - def methodSymbol: Symbol = - def recur(t: tpd.Tree): Symbol = - val sym = tpd.methPart(t).symbol - if sym == defn.Any_typeCast then - t match - case TypeApply(Select(qual, _), _) => recur(qual) - case _ => sym - else sym - recur(tree) - - def msg = { - val more = if (tree.isInstanceOf[tpd.Apply]) " more" else "" - val meth = methodSymbol - val methStr = if (meth.exists) meth.showLocated else "expression" - em"$methStr does not take$more parameters" - } - - def explain = { - val isNullary = methodSymbol.info.isInstanceOf[ExprType] - val addendum = - if (isNullary) "\nNullary methods may not be called with parenthesis" - else "" - - "You have specified more parameter lists than defined in the method definition(s)." + addendum + i"""Failure to generate given instance for type $pt$argStr) + | + |I found: $tree + |But the part corresponding to `` is not a reference that can be generated. + |This might be because resolution yielded as given instance a function that is not + |known to be total and side-effect free.""" + def explain(using Context) = + i"""The part of given resolution that corresponds to `` produced a term that + |is not a stable reference. Therefore a given instance could not be generated. 
+ | + |To trouble-shoot the problem, try to supply an explicit expression instead of + |relying on implicit search at this point.""" + +class SuperQualMustBeParent(qual: untpd.Ident, cls: ClassSymbol)(using Context) +extends ReferenceMsg(SuperQualMustBeParentID) { + def msg(using Context) = i"""|$qual does not name a parent of $cls""" + def explain(using Context) = + val parents: Seq[String] = (cls.info.parents map (_.typeSymbol.name.show)).sorted + i"""|When a qualifier ${hl("T")} is used in a ${hl("super")} prefix of the form ${hl("C.super[T]")}, + |${hl("T")} must be a parent type of ${hl("C")}. + | + |In this case, the parents of $cls are: + |${parents.mkString(" - ", "\n - ", "")} + |""" +} + +class VarArgsParamMustComeLast()(using Context) +extends SyntaxMsg(VarArgsParamMustComeLastID) { + def msg(using Context) = i"""${hl("varargs")} parameter must come last""" + def explain(using Context) = + i"""|The ${hl("varargs")} field must be the last field in the method signature. + |Attempting to define a field in a method signature after a ${hl("varargs")} field is an error. + |""" +} + +import typer.Typer.BindingPrec + +class ConstrProxyShadows(proxy: TermRef, shadowed: Type, shadowedIsApply: Boolean)(using Context) + extends ReferenceMsg(ConstrProxyShadowsID), NoDisambiguation: + + def clsString(using Context) = proxy.symbol.companionClass.showLocated + def shadowedString(using Context) = shadowed.termSymbol.showLocated + def appClause = if shadowedIsApply then " the apply method of" else "" + def appSuffix = if shadowedIsApply then ".apply" else "" + + def msg(using Context) = + i"""Reference to constructor proxy for $clsString + |shadows outer reference to $shadowedString + | + |The instance needs to be created with an explicit `new`.""" + + def explain(using Context) = + i"""There is an ambiguity in the meaning of the call + | + | ${proxy.symbol.name}(...) 
+ | + |It could mean creating an instance of $clsString with + | + | new ${proxy.symbol.companionClass.name}(...) + | + |Or it could mean calling$appClause $shadowedString as in + | + | ${shadowed.termSymbol.name}$appSuffix(...) + | + |To disambiguate, use an explicit `new` if you mean the former, + |or use a full prefix for ${shadowed.termSymbol.name} if you mean the latter.""" +end ConstrProxyShadows + +class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context) + extends ReferenceMsg(AmbiguousReferenceID), NoDisambiguation { + + /** A string which explains how something was bound; Depending on `prec` this is either + * imported by + * or defined in + */ + private def bindingString(prec: BindingPrec, whereFound: Context, qualifier: String = "")(using Context) = { + val howVisible = prec match { + case BindingPrec.Definition => "defined" + case BindingPrec.Inheritance => "inherited" + case BindingPrec.NamedImport => "imported by name" + case BindingPrec.WildImport => "imported" + case BindingPrec.PackageClause => "found" + case BindingPrec.NothingBound => assert(false) } - - } - - class AmbiguousOverload(tree: tpd.Tree, val alternatives: List[SingleDenotation], pt: Type, addendum: String = "")( - implicit ctx: Context) - extends ReferenceMsg(AmbiguousOverloadID) { - private def all = if (alternatives.length == 2) "both" else "all" - def msg = - em"""|Ambiguous overload. The ${err.overloadedAltsStr(alternatives)} - |$all match ${err.expectedTypeStr(pt)}$addendum""".stripMargin - def explain = - em"""|There are ${alternatives.length} methods that could be referenced as the compiler knows too little - |about the expected type. - |You may specify the expected type e.g. 
by - |- assigning it to a value with a specified type, or - |- adding a type ascription as in ${hl("instance.myMethod: String => Int")} - |""" - } - - class ReassignmentToVal(name: Name)(using Context) - extends TypeMsg(ReassignmentToValID) { - def msg = em"""Reassignment to val $name""" - def explain = - em"""|You can not assign a new value to $name as values can't be changed. - |Keep in mind that every statement has a value, so you may e.g. use - | ${hl("val")} $name ${hl("= if (condition) 2 else 5")} - |In case you need a reassignable name, you can declare it as - |variable - | ${hl("var")} $name ${hl("=")} ... - |""".stripMargin - } - - class TypeDoesNotTakeParameters(tpe: Type, params: List[Trees.Tree[Trees.Untyped]])(using Context) - extends TypeMsg(TypeDoesNotTakeParametersID) { - private def fboundsAddendum = - if tpe.typeSymbol.isAllOf(Provisional | TypeParam) then - "\n(Note that F-bounds of type parameters may not be type lambdas)" - else "" - def msg = em"$tpe does not take type parameters$fboundsAddendum" - def explain = - val ps = - if (params.size == 1) s"a type parameter ${params.head}" - else s"type parameters ${params.map(_.show).mkString(", ")}" - i"""You specified ${NoColor(ps)} for ${em"$tpe"}, which is not - |declared to take any. - |""" - } - - class ParameterizedTypeLacksArguments(psym: Symbol)(using Context) - extends TypeMsg(ParameterizedTypeLacksArgumentsID) { - def msg = em"Parameterized $psym lacks argument list" - def explain = - em"""The $psym is declared with non-implicit parameters, you may not leave - |out the parameter list when extending it. 
- |""" - } - - class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(using Context) - extends SyntaxMsg(VarValParametersMayNotBeCallByNameID) { - def varOrVal = if (mutable) em"${hl("var")}" else em"${hl("val")}" - def msg = s"$varOrVal parameters may not be call-by-name" - def explain = - em"""${hl("var")} and ${hl("val")} parameters of classes and traits may no be call-by-name. In case you - |want the parameter to be evaluated on demand, consider making it just a parameter - |and a ${hl("def")} in the class such as - | ${s"class MyClass(${name}Tick: => String) {"} - | ${s" def $name() = ${name}Tick"} - | ${hl("}")} - |""" - } - - class MissingTypeParameterFor(tpe: Type)(using Context) - extends SyntaxMsg(MissingTypeParameterForID) { - def msg = - if (tpe.derivesFrom(defn.AnyKindClass)) em"${tpe} cannot be used as a value type" - else em"Missing type parameter for ${tpe}" - def explain = "" - } - - class MissingTypeParameterInTypeApp(tpe: Type)(using Context) - extends TypeMsg(MissingTypeParameterInTypeAppID) { - def numParams = tpe.typeParams.length - def parameters = if (numParams == 1) "parameter" else "parameters" - def msg = em"Missing type $parameters for $tpe" - def explain = em"A fully applied type is expected but $tpe takes $numParams $parameters" - } - - class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context) - extends TypeMismatchMsg( - if which == "lower" then bound else tpe, - if which == "lower" then tpe else bound)(DoesNotConformToBoundID): - private def isBounds = tpe match - case TypeBounds(lo, hi) => lo ne hi - case _ => false - override def canExplain = !isBounds - def msg = - if isBounds then - em"Type argument ${tpe} does not overlap with $which bound $bound" - else - em"Type argument ${tpe} does not conform to $which bound $bound" - - class DoesNotConformToSelfType(category: String, selfType: Type, cls: Symbol, - otherSelf: Type, relation: String, other: Symbol)( - implicit ctx: Context) - 
extends TypeMismatchMsg(selfType, otherSelf)(DoesNotConformToSelfTypeID) { - def msg = em"""$category: self type $selfType of $cls does not conform to self type $otherSelf - |of $relation $other""" - } - - class DoesNotConformToSelfTypeCantBeInstantiated(tp: Type, selfType: Type)( - implicit ctx: Context) - extends TypeMismatchMsg(tp, selfType)(DoesNotConformToSelfTypeCantBeInstantiatedID) { - def msg = em"""$tp does not conform to its self type $selfType; cannot be instantiated""" - } - - class IllegalParameterInit(found: Type, expected: Type, param: Symbol, cls: Symbol)(using Context) - extends TypeMismatchMsg(found, expected)(IllegalParameterInitID): - def msg = - em"""illegal parameter initialization of $param. - | - | The argument passed for $param has type: $found - | but $cls expects $param to have type: $expected""" - - class AbstractMemberMayNotHaveModifier(sym: Symbol, flag: FlagSet)( - implicit ctx: Context) - extends SyntaxMsg(AbstractMemberMayNotHaveModifierID) { - def msg = em"""${hl("abstract")} $sym may not have `${flag.flagsString}` modifier""" - def explain = "" - } - - class TypesAndTraitsCantBeImplicit()(using Context) - extends SyntaxMsg(TypesAndTraitsCantBeImplicitID) { - def msg = em"""${hl("implicit")} modifier cannot be used for types or traits""" - def explain = "" - } - - class OnlyClassesCanBeAbstract(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(OnlyClassesCanBeAbstractID) { - def explain = "" - def msg = em"""${hl("abstract")} modifier can be used only for classes; it should be omitted for abstract members""" - } - - class AbstractOverrideOnlyInTraits(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(AbstractOverrideOnlyInTraitsID) { - def msg = em"""${hl("abstract override")} modifier only allowed for members of traits""" - def explain = "" - } - - class TraitsMayNotBeFinal(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(TraitsMayNotBeFinalID) { - def msg = em"""$sym may not be ${hl("final")}""" - def 
explain = - "A trait can never be final since it is abstract and must be extended to be useful." - } - - class NativeMembersMayNotHaveImplementation(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(NativeMembersMayNotHaveImplementationID) { - def msg = em"""${hl("@native")} members may not have an implementation""" - def explain = "" - } - - class TraitMayNotDefineNativeMethod(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(TraitMayNotDefineNativeMethodID) { - def msg = em"""A trait cannot define a ${hl("@native")} method.""" - def explain = "" - } - - class OnlyClassesCanHaveDeclaredButUndefinedMembers(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(OnlyClassesCanHaveDeclaredButUndefinedMembersID) { - - private def varNote = - if (sym.is(Mutable)) "Note that variables need to be initialized to be defined." + if (prec.isImportPrec) { + i"""$howVisible$qualifier by ${whereFound.importInfo}""" + } else + i"""$howVisible$qualifier in ${whereFound.owner}""" + } + + def msg(using Context) = + i"""|Reference to $name is ambiguous. + |It is both ${bindingString(newPrec, ctx)} + |and ${bindingString(prevPrec, prevCtx, " subsequently")}""" + + def explain(using Context) = + val precedent = + if newPrec == prevPrec then """two name bindings of equal precedence + |were introduced in the same scope.""".stripMargin + else """a name binding of lower precedence + |in an inner scope cannot shadow a binding with higher precedence in + |an outer scope.""".stripMargin + + i"""|The identifier $name is ambiguous because $precedent + | + |The precedence of the different kinds of name bindings, from highest to lowest, is: + | - Definitions in an enclosing scope + | - Inherited definitions and top-level definitions in packages + | - Names introduced by import of a specific name + | - Names introduced by wildcard import + | - Definitions from packages in other files + |Note: + | - As a rule, definitions take precedence over imports. 
+ | - Definitions in an enclosing scope take precedence over inherited definitions, + | which can result in ambiguities in nested classes. + | - When importing, you can avoid naming conflicts by renaming: + | ${hl("import")} scala.{$name => ${name.show}Tick} + |""" +} + +class MethodDoesNotTakeParameters(tree: tpd.Tree)(using Context) +extends TypeMsg(MethodDoesNotTakeParametersId) { + def methodSymbol(using Context): Symbol = + def recur(t: tpd.Tree): Symbol = + val sym = tpd.methPart(t).symbol + if sym == defn.Any_typeCast then + t match + case TypeApply(Select(qual, _), _) => recur(qual) + case _ => sym + else sym + recur(tree) + + def msg(using Context) = { + val more = if (tree.isInstanceOf[tpd.Apply]) " more" else "" + val meth = methodSymbol + val methStr = if (meth.exists) meth.showLocated else "expression" + i"$methStr does not take$more parameters" + } + + def explain(using Context) = { + val isNullary = methodSymbol.info.isInstanceOf[ExprType] + val addendum = + if (isNullary) "\nNullary methods may not be called with parenthesis" else "" - def msg = em"""Declaration of $sym not allowed here: only classes can have declared but undefined members""" - def explain = s"$varNote" - } - class CannotExtendAnyVal(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendAnyValID) { - def msg = em"""$sym cannot extend ${hl("AnyVal")}""" - def explain = - em"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend - |${hl("Any")} to become ${Green("\"universal traits\"")} which may only have ${hl("def")} members. - |Universal traits can be mixed into classes that extend ${hl("AnyVal")}. - |""" + "You have specified more parameter lists than defined in the method definition(s)." 
+ addendum } - class CannotExtendJavaEnum(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendJavaEnumID) { - def msg = em"""$sym cannot extend ${hl("java.lang.Enum")}: only enums defined with the ${hl("enum")} syntax can""" - def explain = "" - } - - class CannotExtendContextFunction(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendFunctionID) { - def msg = em"""$sym cannot extend a context function class""" - def explain = "" - } - - class JavaEnumParentArgs(parent: Type)(using Context) - extends TypeMsg(JavaEnumParentArgsID) { - def msg = em"""not enough arguments for constructor Enum: ${hl("(name: String, ordinal: Int)")}: ${hl(parent.show)}""" - def explain = "" - } +} - class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context) - extends SyntaxMsg(CannotHaveSameNameAsID) { - import CannotHaveSameNameAs._ - def reasonMessage: String = reason match { - case CannotBeOverridden => "class definitions cannot be overridden" - case DefinedInSelf(self) => - s"""cannot define ${sym.showKind} member with the same name as a ${cls.showKind} member in self reference ${self.name}. 
- |(Note: this can be resolved by using another name) - |""".stripMargin - } - - def msg = em"""$sym cannot have the same name as ${cls.showLocated} -- """ + reasonMessage - def explain = "" - } - object CannotHaveSameNameAs { - sealed trait Reason - case object CannotBeOverridden extends Reason - case class DefinedInSelf(self: tpd.ValDef) extends Reason - } - - class ValueClassesMayNotDefineInner(valueClass: Symbol, inner: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineInnerID) { - def msg = em"""Value classes may not define an inner class""" - def explain = "" - } - - class ValueClassesMayNotDefineNonParameterField(valueClass: Symbol, field: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineNonParameterFieldID) { - def msg = em"""Value classes may not define non-parameter field""" - def explain = "" - } - - class ValueClassesMayNotDefineASecondaryConstructor(valueClass: Symbol, constructor: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineASecondaryConstructorID) { - def msg = em"""Value classes may not define a secondary constructor""" - def explain = "" - } - - class ValueClassesMayNotContainInitalization(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotContainInitalizationID) { - def msg = em"""Value classes may not contain initialization statements""" - def explain = "" - } - - class ValueClassesMayNotBeAbstract(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotBeAbstractID) { - def msg = em"""Value classes may not be ${hl("abstract")}""" - def explain = "" - } - - class ValueClassesMayNotBeContainted(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotBeContaintedID) { - private def localOrMember = if (valueClass.owner.isTerm) "local class" else "member of another class" - def msg = s"""Value classes may not be a $localOrMember""" - def explain = "" - } - - class ValueClassesMayNotWrapAnotherValueClass(valueClass: Symbol)(using Context) 
- extends SyntaxMsg(ValueClassesMayNotWrapAnotherValueClassID) { - def msg = """A value class may not wrap another user-defined value class""" - def explain = "" - } - - class ValueClassParameterMayNotBeAVar(valueClass: Symbol, param: Symbol)(using Context) - extends SyntaxMsg(ValueClassParameterMayNotBeAVarID) { - def msg = em"""A value class parameter may not be a ${hl("var")}""" - def explain = - em"""A value class must have exactly one ${hl("val")} parameter.""" - } - - class ValueClassNeedsOneValParam(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassNeedsExactlyOneValParamID) { - def msg = em"""Value class needs one ${hl("val")} parameter""" - def explain = "" - } - - class ValueClassParameterMayNotBeCallByName(valueClass: Symbol, param: Symbol)(using Context) - extends SyntaxMsg(ValueClassParameterMayNotBeCallByNameID) { - def msg = s"Value class parameter `${param.name}` may not be call-by-name" - def explain = "" - } - - class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context) - extends SyntaxMsg(SuperCallsNotAllowedInlineableID) { - def msg = em"Super call not allowed in inlineable $symbol" - def explain = "Method inlining prohibits calling superclass methods, as it may lead to confusion about which super is being called." 
- } - - class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID): - def msg = em"$tp is not a valid $usage, since it is not an immutable path" - def explain = - i"""An immutable path is - | - a reference to an immutable value, or - | - a reference to `this`, or - | - a selection of an immutable path with an immutable value.""" - - class WrongNumberOfParameters(expected: Int)(using Context) - extends SyntaxMsg(WrongNumberOfParametersID) { - def msg = s"Wrong number of parameters, expected: $expected" - def explain = "" - } - - class DuplicatePrivateProtectedQualifier()(using Context) - extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) { - def msg = "Duplicate private/protected qualifier" - def explain = - em"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes" - } - - class ExpectedStartOfTopLevelDefinition()(using Context) - extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) { - def msg = "Expected start of definition" - def explain = - em"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers" - } - - class NoReturnFromInlineable(owner: Symbol)(using Context) - extends SyntaxMsg(NoReturnFromInlineableID) { - def msg = em"No explicit ${hl("return")} allowed from inlineable $owner" - def explain = - em"""Methods marked with ${hl("inline")} modifier may not use ${hl("return")} statements. - |Instead, you should rely on the last expression's value being - |returned from a method. - |""" - } - - class ReturnOutsideMethodDefinition(owner: Symbol)(using Context) - extends SyntaxMsg(ReturnOutsideMethodDefinitionID) { - def msg = em"${hl("return")} outside method definition" - def explain = - em"""You used ${hl("return")} in ${owner}. - |${hl("return")} is a keyword and may only be used within method declarations. 
- |""" - } - - class ExtendFinalClass(clazz:Symbol, finalClazz: Symbol)(using Context) - extends SyntaxMsg(ExtendFinalClassID) { - def msg = em"$clazz cannot extend ${hl("final")} $finalClazz" - def explain = - em"""A class marked with the ${hl("final")} keyword cannot be extended""" - } - - class ExpectedTypeBoundOrEquals(found: Token)(using Context) - extends SyntaxMsg(ExpectedTypeBoundOrEqualsID) { - def msg = em"${hl("=")}, ${hl(">:")}, or ${hl("<:")} expected, but ${Tokens.showToken(found)} found" - - def explain = - em"""Type parameters and abstract types may be constrained by a type bound. - |Such type bounds limit the concrete values of the type variables and possibly - |reveal more information about the members of such types. - | - |A lower type bound ${hl("B >: A")} expresses that the type variable ${hl("B")} - |refers to a supertype of type ${hl("A")}. - | - |An upper type bound ${hl("T <: A")} declares that type variable ${hl("T")} - |refers to a subtype of type ${hl("A")}. - |""" - } - - class ClassAndCompanionNameClash(cls: Symbol, other: Symbol)(using Context) - extends NamingMsg(ClassAndCompanionNameClashID) { - def msg = - val name = cls.name.stripModuleClassSuffix - em"Name clash: both ${cls.owner} and its companion object defines $name" - def explain = - em"""|A ${cls.kindString} and its companion object cannot both define a ${hl("class")}, ${hl("trait")} or ${hl("object")} with the same name: - | - ${cls.owner} defines ${cls} - | - ${other.owner} defines ${other}""" - } - - class TailrecNotApplicable(symbol: Symbol)(using Context) - extends SyntaxMsg(TailrecNotApplicableID) { - def msg = { - val reason = - if (!symbol.is(Method)) em"$symbol isn't a method" - else if (symbol.is(Deferred)) em"$symbol is abstract" - else if (!symbol.isEffectivelyFinal) em"$symbol is neither ${hl("private")} nor ${hl("final")} so can be overridden" - else em"$symbol contains no recursive calls" - - s"TailRec optimisation not applicable, $reason" - } - def explain = 
"" - } - - class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context) - extends Message(FailureToEliminateExistentialID) { - def kind = MessageKind.Compatibility - def msg = - val originalType = ctx.printer.dclsText(boundSyms, "; ").show - em"""An existential type that came from a Scala-2 classfile for $classRoot - |cannot be mapped accurately to a Scala-3 equivalent. - |original type : $tp forSome ${originalType} - |reduces to : $tp1 - |type used instead: $tp2 - |This choice can cause follow-on type errors or hide type errors. - |Proceed at own risk.""" - def explain = - em"""Existential types in their full generality are no longer supported. - |Scala-3 does applications of class types to wildcard type arguments. - |Other forms of existential types that come from Scala-2 classfiles - |are only approximated in a best-effort way.""" - } - - class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type)(using Context) - extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID) { - def msg = em"Only function types can be followed by ${hl("_")} but the current expression has type $tp" - def explain = - em"""The syntax ${hl("x _")} is no longer supported if ${hl("x")} is not a function. - |To convert to a function value, you need to explicitly write ${hl("() => x")}""" - } - - class MissingEmptyArgumentList(method: String)(using Context) - extends SyntaxMsg(MissingEmptyArgumentListID) { - def msg = em"$method must be called with ${hl("()")} argument" - def explain = { - val codeExample = - """def next(): T = ... - |next // is expanded to next()""" +class AmbiguousOverload(tree: tpd.Tree, val alternatives: List[SingleDenotation], pt: Type, addendum: String = "")( + implicit ctx: Context) +extends ReferenceMsg(AmbiguousOverloadID), NoDisambiguation { + private def all = if (alternatives.length == 2) "both" else "all" + def msg(using Context) = + i"""|Ambiguous overload. 
The ${err.overloadedAltsStr(alternatives)} + |$all match ${err.expectedTypeStr(pt)}$addendum""" + def explain(using Context) = + i"""|There are ${alternatives.length} methods that could be referenced as the compiler knows too little + |about the expected type. + |You may specify the expected type e.g. by + |- assigning it to a value with a specified type, or + |- adding a type ascription as in ${hl("instance.myMethod: String => Int")} + |""" +} + +class ReassignmentToVal(name: Name)(using Context) + extends TypeMsg(ReassignmentToValID) { + def msg(using Context) = i"""Reassignment to val $name""" + def explain(using Context) = + i"""|You can not assign a new value to $name as values can't be changed. + |Keep in mind that every statement has a value, so you may e.g. use + | ${hl("val")} $name ${hl("= if (condition) 2 else 5")} + |In case you need a reassignable name, you can declare it as + |variable + | ${hl("var")} $name ${hl("=")} ... + |""" +} + +class TypeDoesNotTakeParameters(tpe: Type, params: List[untpd.Tree])(using Context) + extends TypeMsg(TypeDoesNotTakeParametersID) { + private def fboundsAddendum(using Context) = + if tpe.typeSymbol.isAllOf(Provisional | TypeParam) then + "\n(Note that F-bounds of type parameters may not be type lambdas)" + else "" + def msg(using Context) = i"$tpe does not take type parameters$fboundsAddendum" + def explain(using Context) = + val ps = + if (params.size == 1) s"a type parameter ${params.head}" + else s"type parameters ${params.map(_.show).mkString(", ")}" + i"""You specified ${NoColor(ps)} for $tpe, which is not + |declared to take any. 
+ |""" +} + +class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(using Context) + extends SyntaxMsg(VarValParametersMayNotBeCallByNameID) { + def varOrVal = if mutable then hl("var") else hl("val") + def msg(using Context) = s"$varOrVal parameters may not be call-by-name" + def explain(using Context) = + i"""${hl("var")} and ${hl("val")} parameters of classes and traits may no be call-by-name. In case you + |want the parameter to be evaluated on demand, consider making it just a parameter + |and a ${hl("def")} in the class such as + | ${s"class MyClass(${name}Tick: => String) {"} + | ${s" def $name() = ${name}Tick"} + | ${hl("}")} + |""" +} + +class MissingTypeParameterFor(tpe: Type)(using Context) + extends SyntaxMsg(MissingTypeParameterForID) { + def msg(using Context) = + if tpe.derivesFrom(defn.AnyKindClass) + then i"$tpe cannot be used as a value type" + else i"Missing type parameter for $tpe" + def explain(using Context) = "" +} + +class MissingTypeParameterInTypeApp(tpe: Type)(using Context) + extends TypeMsg(MissingTypeParameterInTypeAppID) { + def numParams = tpe.typeParams.length + def parameters = if (numParams == 1) "parameter" else "parameters" + def msg(using Context) = i"Missing type $parameters for $tpe" + def explain(using Context) = i"A fully applied type is expected but $tpe takes $numParams $parameters" +} + +class MissingArgument(pname: Name, methString: String)(using Context) + extends TypeMsg(MissingArgumentID): + def msg(using Context) = + if pname.firstPart contains '$' then s"not enough arguments for $methString" + else s"missing argument for parameter $pname of $methString" + def explain(using Context) = "" + +class MissingArgumentList(method: String, sym: Symbol)(using Context) + extends TypeMsg(MissingArgumentListID) { + def msg(using Context) = + val symDcl = if sym.exists then "\n\n " + hl(sym.showDcl(using ctx.withoutColors)) else "" + i"missing argument list for $method$symDcl" + def explain(using Context) = { 
+ i"""Unapplied methods are only converted to functions when a function type is expected.""" + } +} + +class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context) + extends TypeMismatchMsg( + if which == "lower" then bound else tpe, + if which == "lower" then tpe else bound)(DoesNotConformToBoundID): + private def isBounds = tpe match + case TypeBounds(lo, hi) => lo ne hi + case _ => false + override def canExplain = !isBounds + def msg(using Context) = + if isBounds then + i"Type argument ${tpe} does not overlap with $which bound $bound" + else + i"Type argument ${tpe} does not conform to $which bound $bound" + +class DoesNotConformToSelfType(category: String, selfType: Type, cls: Symbol, + otherSelf: Type, relation: String, other: Symbol)( + implicit ctx: Context) + extends TypeMismatchMsg(selfType, otherSelf)(DoesNotConformToSelfTypeID) { + def msg(using Context) = i"""$category: self type $selfType of $cls does not conform to self type $otherSelf + |of $relation $other""" +} + +class DoesNotConformToSelfTypeCantBeInstantiated(tp: Type, selfType: Type)( + implicit ctx: Context) + extends TypeMismatchMsg(tp, selfType)(DoesNotConformToSelfTypeCantBeInstantiatedID) { + def msg(using Context) = i"""$tp does not conform to its self type $selfType; cannot be instantiated""" +} + +class IllegalParameterInit(found: Type, expected: Type, param: Symbol, cls: Symbol)(using Context) + extends TypeMismatchMsg(found, expected)(IllegalParameterInitID): + def msg(using Context) = + i"""illegal parameter initialization of $param. 
+ | + | The argument passed for $param has type: $found + | but $cls expects $param to have type: $expected""" + +class AbstractMemberMayNotHaveModifier(sym: Symbol, flag: FlagSet)( + implicit ctx: Context) + extends SyntaxMsg(AbstractMemberMayNotHaveModifierID) { + def msg(using Context) = i"""${hl("abstract")} $sym may not have `${flag.flagsString}` modifier""" + def explain(using Context) = "" +} + +class TypesAndTraitsCantBeImplicit()(using Context) + extends SyntaxMsg(TypesAndTraitsCantBeImplicitID) { + def msg(using Context) = i"""${hl("implicit")} modifier cannot be used for types or traits""" + def explain(using Context) = "" +} + +class OnlyClassesCanBeAbstract(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(OnlyClassesCanBeAbstractID) { + def explain(using Context) = "" + def msg(using Context) = i"""${hl("abstract")} modifier can be used only for classes; it should be omitted for abstract members""" +} + +class AbstractOverrideOnlyInTraits(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(AbstractOverrideOnlyInTraitsID) { + def msg(using Context) = i"""${hl("abstract override")} modifier only allowed for members of traits""" + def explain(using Context) = "" +} + +class TraitsMayNotBeFinal(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(TraitsMayNotBeFinalID) { + def msg(using Context) = i"""$sym may not be ${hl("final")}""" + def explain(using Context) = + "A trait can never be final since it is abstract and must be extended to be useful." 
+} + +class NativeMembersMayNotHaveImplementation(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(NativeMembersMayNotHaveImplementationID) { + def msg(using Context) = i"""${hl("@native")} members may not have an implementation""" + def explain(using Context) = "" +} + +class TraitMayNotDefineNativeMethod(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(TraitMayNotDefineNativeMethodID) { + def msg(using Context) = i"""A trait cannot define a ${hl("@native")} method.""" + def explain(using Context) = "" +} + +class OnlyClassesCanHaveDeclaredButUndefinedMembers(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(OnlyClassesCanHaveDeclaredButUndefinedMembersID) { + + def msg(using Context) = i"""Declaration of $sym not allowed here: only classes can have declared but undefined members""" + def explain(using Context) = + if sym.is(Mutable) then "Note that variables need to be initialized to be defined." + else "" +} + +class CannotExtendAnyVal(sym: Symbol)(using Context) + extends SyntaxMsg(CannotExtendAnyValID) { + def msg(using Context) = i"""$sym cannot extend ${hl("AnyVal")}""" + def explain(using Context) = + i"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend + |${hl("Any")} to become ${Green("\"universal traits\"")} which may only have ${hl("def")} members. + |Universal traits can be mixed into classes that extend ${hl("AnyVal")}. + |""" +} - em"""Previously an empty argument list () was implicitly inserted when calling a nullary method without arguments. E.g. - | - |$codeExample - | - |In Dotty, this idiom is an error. The application syntax has to follow exactly the parameter syntax. 
- |Excluded from this rule are methods that are defined in Java or that override methods defined in Java.""" - } +class CannotExtendJavaEnum(sym: Symbol)(using Context) + extends SyntaxMsg(CannotExtendJavaEnumID) { + def msg(using Context) = i"""$sym cannot extend ${hl("java.lang.Enum")}: only enums defined with the ${hl("enum")} syntax can""" + def explain(using Context) = "" } - class DuplicateNamedTypeParameter(name: Name)(using Context) - extends SyntaxMsg(DuplicateNamedTypeParameterID) { - def msg = em"Type parameter $name was defined multiple times." - def explain = "" +class CannotExtendContextFunction(sym: Symbol)(using Context) + extends SyntaxMsg(CannotExtendFunctionID) { + def msg(using Context) = i"""$sym cannot extend a context function class""" + def explain(using Context) = "" } - class UndefinedNamedTypeParameter(undefinedName: Name, definedNames: List[Name])(using Context) - extends SyntaxMsg(UndefinedNamedTypeParameterID) { - def msg = em"Type parameter $undefinedName is undefined. Expected one of ${definedNames.map(_.show).mkString(", ")}." - def explain = "" +class JavaEnumParentArgs(parent: Type)(using Context) + extends TypeMsg(JavaEnumParentArgsID) { + def msg(using Context) = i"""not enough arguments for constructor Enum: ${hl("(name: String, ordinal: Int)")}: ${hl(parent.show)}""" + def explain(using Context) = "" } - class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID) { - def msg = - if isStat then - "this kind of statement is not allowed here" - else - val addendum = if isModifier then ": this modifier is not allowed here" else "" - s"Illegal start of $what$addendum" - def explain = - i"""A statement is an import or export, a definition or an expression. 
- |Some statements are only allowed in certain contexts""" +class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context) + extends NamingMsg(CannotHaveSameNameAsID) { + import CannotHaveSameNameAs._ + def reasonMessage(using Context): String = reason match { + case CannotBeOverridden => "class definitions cannot be overridden" + case DefinedInSelf(self) => + s"""cannot define ${sym.showKind} member with the same name as a ${cls.showKind} member in self reference ${self.name}. + |(Note: this can be resolved by using another name) + |""".stripMargin } - class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsExpectedID) { - def msg = em"$symbol is not a trait" - def explain = { - val errorCodeExample = - """class A - |class B - | - |val a = new A with B // will fail with a compile error - class B is not a trait""".stripMargin - val codeExample = - """class A - |trait B - | - |val a = new A with B // compiles normally""".stripMargin + def msg(using Context) = i"""$sym cannot have the same name as ${cls.showLocated} -- """ + reasonMessage + def explain(using Context) = "" +} +object CannotHaveSameNameAs { + sealed trait Reason + case object CannotBeOverridden extends Reason + case class DefinedInSelf(self: tpd.ValDef) extends Reason +} + +class ValueClassesMayNotDefineInner(valueClass: Symbol, inner: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotDefineInnerID) { + def msg(using Context) = i"""Value classes may not define an inner class""" + def explain(using Context) = "" +} + +class ValueClassesMayNotDefineNonParameterField(valueClass: Symbol, field: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotDefineNonParameterFieldID) { + def msg(using Context) = i"""Value classes may not define non-parameter field""" + def explain(using Context) = "" +} + +class ValueClassesMayNotDefineASecondaryConstructor(valueClass: Symbol, constructor: Symbol)(using Context) + extends 
SyntaxMsg(ValueClassesMayNotDefineASecondaryConstructorID) { + def msg(using Context) = i"""Value classes may not define a secondary constructor""" + def explain(using Context) = "" +} + +class ValueClassesMayNotContainInitalization(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotContainInitalizationID) { + def msg(using Context) = i"""Value classes may not contain initialization statements""" + def explain(using Context) = "" +} + +class ValueClassesMayNotBeAbstract(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotBeAbstractID) { + def msg(using Context) = i"""Value classes may not be ${hl("abstract")}""" + def explain(using Context) = "" +} + +class ValueClassesMayNotBeContainted(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotBeContaintedID) { + private def localOrMember = if (valueClass.owner.isTerm) "local class" else "member of another class" + def msg(using Context) = s"""Value classes may not be a $localOrMember""" + def explain(using Context) = "" +} + +class ValueClassesMayNotWrapAnotherValueClass(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotWrapAnotherValueClassID) { + def msg(using Context) = """A value class may not wrap another user-defined value class""" + def explain(using Context) = "" +} + +class ValueClassParameterMayNotBeAVar(valueClass: Symbol, param: Symbol)(using Context) + extends SyntaxMsg(ValueClassParameterMayNotBeAVarID) { + def msg(using Context) = i"""A value class parameter may not be a ${hl("var")}""" + def explain(using Context) = + i"""A value class must have exactly one ${hl("val")} parameter.""" +} + +class ValueClassNeedsOneValParam(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassNeedsExactlyOneValParamID) { + def msg(using Context) = i"""Value class needs one ${hl("val")} parameter""" + def explain(using Context) = "" +} + +class ValueClassParameterMayNotBeCallByName(valueClass: Symbol, param: Symbol)(using 
Context) + extends SyntaxMsg(ValueClassParameterMayNotBeCallByNameID) { + def msg(using Context) = s"Value class parameter `${param.name}` may not be call-by-name" + def explain(using Context) = "" +} + +class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context) + extends SyntaxMsg(SuperCallsNotAllowedInlineableID) { + def msg(using Context) = i"Super call not allowed in inlineable $symbol" + def explain(using Context) = "Method inlining prohibits calling superclass methods, as it may lead to confusion about which super is being called." +} + +class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID): + def msg(using Context) = i"$tp is not a valid $usage, since it is not an immutable path" + def explain(using Context) = + i"""An immutable path is + | - a reference to an immutable value, or + | - a reference to `this`, or + | - a selection of an immutable path with an immutable value.""" + +class WrongNumberOfParameters(expected: Int)(using Context) + extends SyntaxMsg(WrongNumberOfParametersID) { + def msg(using Context) = s"Wrong number of parameters, expected: $expected" + def explain(using Context) = "" +} + +class DuplicatePrivateProtectedQualifier()(using Context) + extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) { + def msg(using Context) = "Duplicate private/protected qualifier" + def explain(using Context) = + i"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes" +} + +class ExpectedStartOfTopLevelDefinition()(using Context) + extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) { + def msg(using Context) = "Expected start of definition" + def explain(using Context) = + i"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers" +} + +class NoReturnFromInlineable(owner: Symbol)(using Context) + extends SyntaxMsg(NoReturnFromInlineableID) { + def msg(using Context) = i"No explicit 
${hl("return")} allowed from inlineable $owner" + def explain(using Context) = + i"""Methods marked with ${hl("inline")} modifier may not use ${hl("return")} statements. + |Instead, you should rely on the last expression's value being + |returned from a method. + |""" +} + +class ReturnOutsideMethodDefinition(owner: Symbol)(using Context) + extends SyntaxMsg(ReturnOutsideMethodDefinitionID) { + def msg(using Context) = i"${hl("return")} outside method definition" + def explain(using Context) = + i"""You used ${hl("return")} in ${owner}. + |${hl("return")} is a keyword and may only be used within method declarations. + |""" +} + +class ExtendFinalClass(clazz:Symbol, finalClazz: Symbol)(using Context) + extends SyntaxMsg(ExtendFinalClassID) { + def msg(using Context) = i"$clazz cannot extend ${hl("final")} $finalClazz" + def explain(using Context) = + i"""A class marked with the ${hl("final")} keyword cannot be extended""" +} + +class ExpectedTypeBoundOrEquals(found: Token)(using Context) + extends SyntaxMsg(ExpectedTypeBoundOrEqualsID) { + def msg(using Context) = i"${hl("=")}, ${hl(">:")}, or ${hl("<:")} expected, but ${Tokens.showToken(found)} found" + + def explain(using Context) = + i"""Type parameters and abstract types may be constrained by a type bound. + |Such type bounds limit the concrete values of the type variables and possibly + |reveal more information about the members of such types. + | + |A lower type bound ${hl("B >: A")} expresses that the type variable ${hl("B")} + |refers to a supertype of type ${hl("A")}. + | + |An upper type bound ${hl("T <: A")} declares that type variable ${hl("T")} + |refers to a subtype of type ${hl("A")}. 
+ |""" +} + +class ClassAndCompanionNameClash(cls: Symbol, other: Symbol)(using Context) + extends NamingMsg(ClassAndCompanionNameClashID) { + def msg(using Context) = + val name = cls.name.stripModuleClassSuffix + i"Name clash: both ${cls.owner} and its companion object defines $name" + def explain(using Context) = + i"""|A ${cls.kindString} and its companion object cannot both define a ${hl("class")}, ${hl("trait")} or ${hl("object")} with the same name: + | - ${cls.owner} defines ${cls} + | - ${other.owner} defines ${other}""" +} + +class TailrecNotApplicable(symbol: Symbol)(using Context) + extends SyntaxMsg(TailrecNotApplicableID) { + def msg(using Context) = { + val reason = + if !symbol.is(Method) then i"$symbol isn't a method" + else if symbol.is(Deferred) then i"$symbol is abstract" + else if !symbol.isEffectivelyFinal then i"$symbol is neither ${hl("private")} nor ${hl("final")} so can be overridden" + else i"$symbol contains no recursive calls" + + s"TailRec optimisation not applicable, $reason" + } + def explain(using Context) = "" +} + +class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context) + extends Message(FailureToEliminateExistentialID) { + def kind = MessageKind.Compatibility + def msg(using Context) = + val originalType = ctx.printer.dclsText(boundSyms, "; ").show + i"""An existential type that came from a Scala-2 classfile for $classRoot + |cannot be mapped accurately to a Scala-3 equivalent. + |original type : $tp forSome ${originalType} + |reduces to : $tp1 + |type used instead: $tp2 + |This choice can cause follow-on type errors or hide type errors. + |Proceed at own risk.""" + def explain(using Context) = + i"""Existential types in their full generality are no longer supported. + |Scala-3 does applications of class types to wildcard type arguments. 
+ |Other forms of existential types that come from Scala-2 classfiles + |are only approximated in a best-effort way.""" +} + +class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type)(using Context) + extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID) { + def msg(using Context) = i"Only function types can be followed by ${hl("_")} but the current expression has type $tp" + def explain(using Context) = + i"""The syntax ${hl("x _")} is no longer supported if ${hl("x")} is not a function. + |To convert to a function value, you need to explicitly write ${hl("() => x")}""" +} + +class MissingEmptyArgumentList(method: String)(using Context) + extends SyntaxMsg(MissingEmptyArgumentListID) { + def msg(using Context) = i"$method must be called with ${hl("()")} argument" + def explain(using Context) = { + val codeExample = + """def next(): T = ... + |next // is expanded to next()""" + + i"""Previously an empty argument list () was implicitly inserted when calling a nullary method without arguments. E.g. + | + |$codeExample + | + |In Dotty, this idiom is an error. The application syntax has to follow exactly the parameter syntax. + |Excluded from this rule are methods that are defined in Java or that override methods defined in Java.""" + } +} + +class DuplicateNamedTypeParameter(name: Name)(using Context) + extends SyntaxMsg(DuplicateNamedTypeParameterID) { + def msg(using Context) = i"Type parameter $name was defined multiple times." + def explain(using Context) = "" +} + +class UndefinedNamedTypeParameter(undefinedName: Name, definedNames: List[Name])(using Context) + extends SyntaxMsg(UndefinedNamedTypeParameterID) { + def msg(using Context) = i"Type parameter $undefinedName is undefined. Expected one of ${definedNames.map(_.show).mkString(", ")}." 
+ def explain(using Context) = "" +} + +class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID) { + def msg(using Context) = + if isStat then + "this kind of statement is not allowed here" + else + val addendum = if isModifier then ": this modifier is not allowed here" else "" + s"Illegal start of $what$addendum" + def explain(using Context) = + i"""A statement is an import or export, a definition or an expression. + |Some statements are only allowed in certain contexts""" +} + +class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsExpectedID) { + def msg(using Context) = i"$symbol is not a trait" + def explain(using Context) = { + val errorCodeExample = + """class A + |class B + | + |val a = new A with B // will fail with a compile error - class B is not a trait""".stripMargin + val codeExample = + """class A + |trait B + | + |val a = new A with B // compiles normally""".stripMargin - em"""Only traits can be mixed into classes using a ${hl("with")} keyword. - |Consider the following example: - | - |$errorCodeExample - | - |The example mentioned above would fail because B is not a trait. - |But if you make B a trait it will be compiled without any errors: - | - |$codeExample - |""" - } + i"""Only traits can be mixed into classes using a ${hl("with")} keyword. + |Consider the following example: + | + |$errorCodeExample + | + |The example mentioned above would fail because B is not a trait. + |But if you make B a trait it will be compiled without any errors: + | + |$codeExample + |""" } +} - class TraitRedefinedFinalMethodFromAnyRef(method: Symbol)(using Context) extends SyntaxMsg(TraitRedefinedFinalMethodFromAnyRefID) { - def msg = em"Traits cannot redefine final $method from ${hl("class AnyRef")}." 
- def explain = "" - } +class TraitRedefinedFinalMethodFromAnyRef(method: Symbol)(using Context) extends SyntaxMsg(TraitRedefinedFinalMethodFromAnyRefID) { + def msg(using Context) = i"Traits cannot redefine final $method from ${hl("class AnyRef")}." + def explain(using Context) = "" +} - class AlreadyDefined(name: Name, owner: Symbol, conflicting: Symbol)(using Context) extends NamingMsg(AlreadyDefinedID): - private def where: String = +class AlreadyDefined(name: Name, owner: Symbol, conflicting: Symbol)(using Context) +extends NamingMsg(AlreadyDefinedID): + def msg(using Context) = + def where: String = if conflicting.effectiveOwner.is(Package) && conflicting.associatedFile != null then i" in ${conflicting.associatedFile}" else if conflicting.owner == owner then "" else i" in ${conflicting.owner}" - private def note = + def note = if owner.is(Method) || conflicting.is(Method) then "\n\nNote that overloaded methods must all be defined in the same group of toplevel definitions" else "" - def msg = - if conflicting.isTerm != name.isTermName then - em"$name clashes with $conflicting$where; the two must be defined together" - else - em"$name is already defined as $conflicting$where$note" - def explain = "" - - class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(PackageNameAlreadyDefinedID) { - lazy val (where, or) = - if pkg.associatedFile == null then ("", "") - else (s" in ${pkg.associatedFile}", " or delete the containing class file") - def msg = em"""${pkg.name} is the name of $pkg$where. - |It cannot be used at the same time as the name of a package.""" - def explain = - em"""An ${hl("object")} or other toplevel definition cannot have the same name as an existing ${hl("package")}. 
- |Rename either one of them$or.""" - } - - class UnapplyInvalidNumberOfArguments(qual: untpd.Tree, argTypes: List[Type])(using Context) - extends SyntaxMsg(UnapplyInvalidNumberOfArgumentsID) { - def msg = em"Wrong number of argument patterns for $qual; expected: ($argTypes%, %)" - def explain = - em"""The Unapply method of $qual was used with incorrect number of arguments. - |Expected usage would be something like: - |case $qual(${argTypes.map(_ => '_')}%, %) => ... - | - |where subsequent arguments would have following types: ($argTypes%, %). - |""".stripMargin - } - - class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Name)(using Context) - extends DeclarationMsg(UnapplyInvalidReturnTypeID) { - def msg = - val addendum = - if Feature.migrateTo3 && unapplyName == nme.unapplySeq - then "\nYou might want to try to rewrite the extractor to use `unapply` instead." - else "" - em"""| ${Red(i"$unapplyResult")} is not a valid result type of an $unapplyName method of an ${Magenta("extractor")}.$addendum""" - def explain = if (unapplyName.show == "unapply") - em""" - |To be used as an extractor, an unapply method has to return a type that either: - | - has members ${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} (usually an ${Green("Option[S]")}) - | - is a ${Green("Boolean")} - | - is a ${Green("Product")} (like a ${Magenta("Tuple2[T1, T2]")}) - | - |class A(val i: Int) - | - |object B { - | def unapply(a: A): ${Green("Option[Int]")} = Some(a.i) - |} - | - |object C { - | def unapply(a: A): ${Green("Boolean")} = a.i == 2 - |} - | - |object D { - | def unapply(a: A): ${Green("(Int, Int)")} = (a.i, a.i) - |} - | - |object Test { - | def test(a: A) = a match { - | ${Magenta("case B(1)")} => 1 - | ${Magenta("case a @ C()")} => 2 - | ${Magenta("case D(3, 3)")} => 3 - | } - |} - """.stripMargin + if conflicting.isTerm != name.isTermName then + i"$name clashes with $conflicting$where; the two must be defined together" else - em""" - |To be used as an 
extractor, an unapplySeq method has to return a type which has members - |${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} where ${Magenta("S <: Seq[V]")} (usually an ${Green("Option[Seq[V]]")}): - | - |object CharList { - | def unapplySeq(s: String): ${Green("Option[Seq[Char]")} = Some(s.toList) - | - | "example" match { - | ${Magenta("case CharList(c1, c2, c3, c4, _, _, _)")} => - | println(s"$$c1,$$c2,$$c3,$$c4") - | case _ => - | println("Expected *exactly* 7 characters!") - | } - |} - """.stripMargin - } - - class StaticFieldsOnlyAllowedInObjects(member: Symbol)(using Context) extends SyntaxMsg(StaticFieldsOnlyAllowedInObjectsID) { - def msg = em"${hl("@static")} $member in ${member.owner} must be defined inside a static ${hl("object")}." - def explain = - em"${hl("@static")} members are only allowed inside objects." - } - - class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])(using Context) extends SyntaxMsg(StaticFieldsShouldPrecedeNonStaticID) { - def msg = em"${hl("@static")} $member in ${member.owner} must be defined before non-static fields." - def explain = { - val nonStatics = defns.takeWhile(_.symbol != member).take(3).filter(_.isInstanceOf[tpd.ValDef]) - val codeExample = s"""object ${member.owner.name.firstPart} { - | @static ${member} = ... - | ${nonStatics.map(m => s"${m.symbol} = ...").mkString("\n ")} - | ... - |}""" - em"""The fields annotated with @static should precede any non @static fields. - |This ensures that we do not introduce surprises for users in initialization order of this class. - |Static field are initialized when class loading the code of Foo. - |Non static fields are only initialized the first time that Foo is accessed. 
- | - |The definition of ${member.name} should have been before the non ${hl("@static val")}s: - |$codeExample + i"$name is already defined as $conflicting$where$note" + def explain(using Context) = "" + +class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(PackageNameAlreadyDefinedID) { + def msg(using Context) = + def where = if pkg.associatedFile == null then "" else s" in ${pkg.associatedFile}" + i"""${pkg.name} is the name of $pkg$where. + |It cannot be used at the same time as the name of a package.""" + def explain(using Context) = + def or = if pkg.associatedFile == null then "" else " or delete the containing class file" + i"""An ${hl("object")} or other toplevel definition cannot have the same name as an existing ${hl("package")}. + |Rename either one of them$or.""" +} + +class UnapplyInvalidNumberOfArguments(qual: untpd.Tree, argTypes: List[Type])(using Context) + extends SyntaxMsg(UnapplyInvalidNumberOfArgumentsID) { + def msg(using Context) = i"Wrong number of argument patterns for $qual; expected: ($argTypes%, %)" + def explain(using Context) = + i"""The Unapply method of $qual was used with incorrect number of arguments. + |Expected usage would be something like: + |case $qual(${argTypes.map(_ => '_')}%, %) => ... + | + |where subsequent arguments would have following types: ($argTypes%, %). |""" - } - } - - class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { - def msg = em"Cyclic inheritance: $symbol extends itself$addendum" - def explain = { - val codeExample = "class A extends A" - - em"""Cyclic inheritance is prohibited in Dotty. 
- |Consider the following example: - | - |$codeExample - | - |The example mentioned above would fail because this type of inheritance hierarchy - |creates a "cycle" where a not yet defined class A extends itself which makes - |impossible to instantiate an object of this class""" - } - } - - class BadSymbolicReference(denot: SymDenotation)(using Context) - extends ReferenceMsg(BadSymbolicReferenceID) { - def msg = { - val denotationOwner = denot.owner - val denotationName = ctx.fresh.setSetting(ctx.settings.YdebugNames, true).printer.nameString(denot.name) - val file = denot.symbol.associatedFile - val (location, src) = - if (file != null) (s" in $file", file.toString) - else ("", "the signature") - - em"""Bad symbolic reference. A signature$location - |refers to $denotationName in ${denotationOwner.showKind} ${denotationOwner.showFullName} which is not available. - |It may be completely missing from the current classpath, or the version on - |the classpath might be incompatible with the version used when compiling $src.""" - } - - def explain = "" - } - - class UnableToExtendSealedClass(pclazz: Symbol)(using Context) extends SyntaxMsg(UnableToExtendSealedClassID) { - def msg = em"Cannot extend ${hl("sealed")} $pclazz in a different source file" - def explain = "A sealed class or trait can only be extended in the same file as its declaration" - } - - class SymbolHasUnparsableVersionNumber(symbol: Symbol, errorMessage: String)(using Context) - extends SyntaxMsg(SymbolHasUnparsableVersionNumberID) { - def msg = em"${symbol.showLocated} has an unparsable version number: $errorMessage" - def explain = - em"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics - |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs - |whose behavior may have changed since version change.""" - } - - class SymbolChangedSemanticsInVersion( - symbol: Symbol, - migrationVersion: ScalaVersion, - migrationMessage: 
String - )(using Context) extends SyntaxMsg(SymbolChangedSemanticsInVersionID) { - def msg = em"${symbol.showLocated} has changed semantics in version $migrationVersion: $migrationMessage" - def explain = - em"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics - |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs - |whose behavior may have changed since version change.""" - } - - class UnableToEmitSwitch()(using Context) - extends SyntaxMsg(UnableToEmitSwitchID) { - def msg = em"Could not emit switch for ${hl("@switch")} annotated match" - def explain = { - val codeExample = - """val ConstantB = 'B' - |final val ConstantC = 'C' - |def tokenMe(ch: Char) = (ch: @switch) match { - | case '\t' | '\n' => 1 - | case 'A' => 2 - | case ConstantB => 3 // a non-literal may prevent switch generation: this would not compile - | case ConstantC => 4 // a constant value is allowed - | case _ => 5 - |}""".stripMargin - - em"""If annotated with ${hl("@switch")}, the compiler will verify that the match has been compiled to a - |tableswitch or lookupswitch and issue an error if it instead compiles into a series of conditional - |expressions. Example usage: - | - |$codeExample - | - |The compiler will not apply the optimisation if: - |- the matched value is not of type ${hl("Int")}, ${hl("Byte")}, ${hl("Short")} or ${hl("Char")} - |- the matched value is not a constant literal - |- there are less than three cases""" - } - } - - class MissingCompanionForStatic(member: Symbol)(using Context) - extends SyntaxMsg(MissingCompanionForStaticID) { - def msg = em"${member.owner} does not have a companion class" - def explain = - em"An object that contains ${hl("@static")} members must have a companion class." 
- } - - class PolymorphicMethodMissingTypeInParent(rsym: Symbol, parentSym: Symbol)(using Context) - extends SyntaxMsg(PolymorphicMethodMissingTypeInParentID) { - def msg = em"Polymorphic refinement $rsym without matching type in parent $parentSym is no longer allowed" - def explain = - em"""Polymorphic $rsym is not allowed in the structural refinement of $parentSym because - |$rsym does not override any method in $parentSym. Structural refinement does not allow for - |polymorphic methods.""" - } - - class ParamsNoInline(owner: Symbol)(using Context) - extends SyntaxMsg(ParamsNoInlineID) { - def msg = em"""${hl("inline")} modifier can only be used for parameters of inline methods""" - def explain = "" - } - - class JavaSymbolIsNotAValue(symbol: Symbol)(using Context) extends TypeMsg(JavaSymbolIsNotAValueID) { - def msg = { - val kind = - if (symbol is Package) em"$symbol" - else em"Java defined ${hl("class " + symbol.name)}" - - s"$kind is not a value" - } - def explain = "" - } - - class DoubleDefinition(decl: Symbol, previousDecl: Symbol, base: Symbol)(using Context) extends NamingMsg(DoubleDefinitionID) { - def msg = { - def nameAnd = if (decl.name != previousDecl.name) " name and" else "" - def erasedType = if ctx.erasedTypes then i" ${decl.info}" else "" - def details(using Context): String = - if (decl.isRealMethod && previousDecl.isRealMethod) { - import Signature.MatchDegree._ - - // compare the signatures when both symbols represent methods - decl.signature.matchDegree(previousDecl.signature) match { - case NoMatch => - // If the signatures don't match at all at the current phase, then - // they might match after erasure. - if ctx.phase.id <= elimErasedValueTypePhase.id then - atPhase(elimErasedValueTypePhase.next)(details) - else - "" // shouldn't be reachable - case ParamMatch => - "have matching parameter types." - case MethodNotAMethodMatch => - "neither has parameters." 
- case FullMatch => - val hint = - if !decl.hasAnnotation(defn.TargetNameAnnot) - && !previousDecl.hasAnnotation(defn.TargetNameAnnot) - then - i""" - | - |Consider adding a @targetName annotation to one of the conflicting definitions - |for disambiguation.""" - else "" - i"have the same$nameAnd type$erasedType after erasure.$hint" - } +} + +class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Name)(using Context) + extends DeclarationMsg(UnapplyInvalidReturnTypeID) { + def msg(using Context) = + val addendum = + if Feature.migrateTo3 && unapplyName == nme.unapplySeq + then "\nYou might want to try to rewrite the extractor to use `unapply` instead." + else "" + i"""| ${Red(i"$unapplyResult")} is not a valid result type of an $unapplyName method of an ${Magenta("extractor")}.$addendum""" + def explain(using Context) = if (unapplyName.show == "unapply") + i""" + |To be used as an extractor, an unapply method has to return a type that either: + | - has members ${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} (usually an ${Green("Option[S]")}) + | - is a ${Green("Boolean")} + | - is a ${Green("Product")} (like a ${Magenta("Tuple2[T1, T2]")}) + | + |class A(val i: Int) + | + |object B { + | def unapply(a: A): ${Green("Option[Int]")} = Some(a.i) + |} + | + |object C { + | def unapply(a: A): ${Green("Boolean")} = a.i == 2 + |} + | + |object D { + | def unapply(a: A): ${Green("(Int, Int)")} = (a.i, a.i) + |} + | + |object Test { + | def test(a: A) = a match { + | ${Magenta("case B(1)")} => 1 + | ${Magenta("case a @ C()")} => 2 + | ${Magenta("case D(3, 3)")} => 3 + | } + |} + """ + else + i""" + |To be used as an extractor, an unapplySeq method has to return a type which has members + |${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} where ${Magenta("S <: Seq[V]")} (usually an ${Green("Option[Seq[V]]")}): + | + |object CharList { + | def unapplySeq(s: String): ${Green("Option[Seq[Char]")} = Some(s.toList) + | + | "example" match { + | 
${Magenta("case CharList(c1, c2, c3, c4, _, _, _)")} => + | println(s"$$c1,$$c2,$$c3,$$c4") + | case _ => + | println("Expected *exactly* 7 characters!") + | } + |} + """ +} + +class StaticFieldsOnlyAllowedInObjects(member: Symbol)(using Context) extends SyntaxMsg(StaticFieldsOnlyAllowedInObjectsID) { + def msg(using Context) = i"${hl("@static")} $member in ${member.owner} must be defined inside a static ${hl("object")}." + def explain(using Context) = + i"${hl("@static")} members are only allowed inside objects." +} + +class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])(using Context) extends SyntaxMsg(StaticFieldsShouldPrecedeNonStaticID) { + def msg(using Context) = i"${hl("@static")} $member in ${member.owner} must be defined before non-static fields." + def explain(using Context) = { + val nonStatics = defns.takeWhile(_.symbol != member).take(3).filter(_.isInstanceOf[tpd.ValDef]) + val codeExample = s"""object ${member.owner.name.firstPart} { + | @static ${member} = ... + | ${nonStatics.map(m => s"${m.symbol} = ...").mkString("\n ")} + | ... + |}""" + i"""The fields annotated with @static should precede any non @static fields. + |This ensures that we do not introduce surprises for users in initialization order of this class. + |Static field are initialized when class loading the code of Foo. + |Non static fields are only initialized the first time that Foo is accessed. + | + |The definition of ${member.name} should have been before the non ${hl("@static val")}s: + |$codeExample + |""" + } +} + +class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { + def msg(using Context) = i"Cyclic inheritance: $symbol extends itself$addendum" + def explain(using Context) = { + val codeExample = "class A extends A" + + i"""Cyclic inheritance is prohibited in Dotty. 
+ |Consider the following example: + | + |$codeExample + | + |The example mentioned above would fail because this type of inheritance hierarchy + |creates a "cycle" where a not yet defined class A extends itself which makes + |impossible to instantiate an object of this class""" + } +} + +class BadSymbolicReference(denot: SymDenotation)(using Context) +extends ReferenceMsg(BadSymbolicReferenceID) { + def msg(using Context) = { + val denotationOwner = denot.owner + val denotationName = ctx.fresh.setSetting(ctx.settings.YdebugNames, true).printer.nameString(denot.name) + val file = denot.symbol.associatedFile + val (location, src) = + if (file != null) (s" in $file", file.toString) + else ("", "the signature") + + i"""Bad symbolic reference. A signature$location + |refers to $denotationName in ${denotationOwner.showKind} ${denotationOwner.showFullName} which is not available. + |It may be completely missing from the current classpath, or the version on + |the classpath might be incompatible with the version used when compiling $src.""" + } + + def explain(using Context) = "" +} + +class UnableToExtendSealedClass(pclazz: Symbol)(using Context) extends SyntaxMsg(UnableToExtendSealedClassID) { + def msg(using Context) = i"Cannot extend ${hl("sealed")} $pclazz in a different source file" + def explain(using Context) = "A sealed class or trait can only be extended in the same file as its declaration" +} + +class SymbolHasUnparsableVersionNumber(symbol: Symbol, errorMessage: String)(using Context) +extends SyntaxMsg(SymbolHasUnparsableVersionNumberID) { + def msg(using Context) = i"${symbol.showLocated} has an unparsable version number: $errorMessage" + def explain(using Context) = + i"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics + |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs + |whose behavior may have changed since version change.""" +} + +class 
SymbolChangedSemanticsInVersion( + symbol: Symbol, + migrationVersion: ScalaVersion, + migrationMessage: String +)(using Context) extends SyntaxMsg(SymbolChangedSemanticsInVersionID) { + def msg(using Context) = i"${symbol.showLocated} has changed semantics in version $migrationVersion: $migrationMessage" + def explain(using Context) = + i"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics + |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs + |whose behavior may have changed since version change.""" +} + +class UnableToEmitSwitch()(using Context) +extends SyntaxMsg(UnableToEmitSwitchID) { + def msg(using Context) = i"Could not emit switch for ${hl("@switch")} annotated match" + def explain(using Context) = { + val codeExample = + """val ConstantB = 'B' + |final val ConstantC = 'C' + |def tokenMe(ch: Char) = (ch: @switch) match { + | case '\t' | '\n' => 1 + | case 'A' => 2 + | case ConstantB => 3 // a non-literal may prevent switch generation: this would not compile + | case ConstantC => 4 // a constant value is allowed + | case _ => 5 + |}""".stripMargin + + i"""If annotated with ${hl("@switch")}, the compiler will verify that the match has been compiled to a + |tableswitch or lookupswitch and issue an error if it instead compiles into a series of conditional + |expressions. Example usage: + | + |$codeExample + | + |The compiler will not apply the optimisation if: + |- the matched value is not of type ${hl("Int")}, ${hl("Byte")}, ${hl("Short")} or ${hl("Char")} + |- the matched value is not a constant literal + |- there are less than three cases""" + } +} + +class MissingCompanionForStatic(member: Symbol)(using Context) +extends SyntaxMsg(MissingCompanionForStaticID) { + def msg(using Context) = i"${member.owner} does not have a companion class" + def explain(using Context) = + i"An object that contains ${hl("@static")} members must have a companion class." 
+} + +class PolymorphicMethodMissingTypeInParent(rsym: Symbol, parentSym: Symbol)(using Context) +extends SyntaxMsg(PolymorphicMethodMissingTypeInParentID) { + def msg(using Context) = i"Polymorphic refinement $rsym without matching type in parent $parentSym is no longer allowed" + def explain(using Context) = + i"""Polymorphic $rsym is not allowed in the structural refinement of $parentSym because + |$rsym does not override any method in $parentSym. Structural refinement does not allow for + |polymorphic methods.""" +} + +class ParamsNoInline(owner: Symbol)(using Context) + extends SyntaxMsg(ParamsNoInlineID) { + def msg(using Context) = i"""${hl("inline")} modifier can only be used for parameters of inline methods""" + def explain(using Context) = "" +} + +class JavaSymbolIsNotAValue(symbol: Symbol)(using Context) extends TypeMsg(JavaSymbolIsNotAValueID) { + def msg(using Context) = + val kind = + if symbol is Package then i"$symbol" + else i"Java defined ${hl("class " + symbol.name)}" + s"$kind is not a value" + def explain(using Context) = "" +} + +class DoubleDefinition(decl: Symbol, previousDecl: Symbol, base: Symbol)(using Context) +extends NamingMsg(DoubleDefinitionID) { + def msg(using Context) = { + def nameAnd = if (decl.name != previousDecl.name) " name and" else "" + def erasedType = if ctx.erasedTypes then i" ${decl.info}" else "" + def details(using Context): String = + if (decl.isRealMethod && previousDecl.isRealMethod) { + import Signature.MatchDegree._ + + // compare the signatures when both symbols represent methods + decl.signature.matchDegree(previousDecl.signature) match { + case NoMatch => + // If the signatures don't match at all at the current phase, then + // they might match after erasure. + if ctx.phase.id <= elimErasedValueTypePhase.id then + atPhase(elimErasedValueTypePhase.next)(details) + else + "" // shouldn't be reachable + case ParamMatch => + "have matching parameter types." 
+ case MethodNotAMethodMatch => + "neither has parameters." + case FullMatch => + val hint = + if !decl.hasAnnotation(defn.TargetNameAnnot) + && !previousDecl.hasAnnotation(defn.TargetNameAnnot) + then + i""" + | + |Consider adding a @targetName annotation to one of the conflicting definitions + |for disambiguation.""" + else "" + i"have the same$nameAnd type$erasedType after erasure.$hint" } - else "" - def symLocation(sym: Symbol) = { - val lineDesc = - if (sym.span.exists && sym.span != sym.owner.span) - s" at line ${sym.srcPos.line + 1}" - else "" - i"in ${sym.owner}${lineDesc}" } - val clashDescription = - if (decl.owner eq previousDecl.owner) - "Double definition" - else if ((decl.owner eq base) || (previousDecl eq base)) - "Name clash between defined and inherited member" - else - "Name clash between inherited members" - - atPhase(typerPhase) { - em"""$clashDescription: - |${previousDecl.showDcl} ${symLocation(previousDecl)} and - |${decl.showDcl} ${symLocation(decl)} - |""" - } + details + else "" + def symLocation(sym: Symbol) = { + val lineDesc = + if (sym.span.exists && sym.span != sym.owner.span) + s" at line ${sym.srcPos.line + 1}" + else "" + i"in ${sym.owner}${lineDesc}" } - def explain = "" - } - - class ImportRenamedTwice(ident: untpd.Ident)(using Context) extends SyntaxMsg(ImportRenamedTwiceID) { - def msg = s"${ident.show} is renamed twice on the same import line." - def explain = "" - } - - class TypeTestAlwaysDiverges(scrutTp: Type, testTp: Type)(using Context) extends SyntaxMsg(TypeTestAlwaysDivergesID) { - def msg = - s"This type test will never return a result since the scrutinee type ${scrutTp.show} does not contain any value." 
- def explain = "" - } - - // Relative of CyclicReferenceInvolvingImplicit and RecursiveValueNeedsResultType - class TermMemberNeedsResultTypeForImplicitSearch(cycleSym: Symbol)(using Context) - extends CyclicMsg(TermMemberNeedsNeedsResultTypeForImplicitSearchID) { - def msg = em"""$cycleSym needs result type because its right-hand side attempts implicit search""" - def explain = - em"""|The right hand-side of $cycleSym's definition requires an implicit search at the highlighted position. - |To avoid this error, give `$cycleSym` an explicit type. - |""".stripMargin - } - - class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends SyntaxMsg(ClassCannotExtendEnumID) { - def msg = em"""$cls in ${cls.owner} extends enum ${parent.name}, but extending enums is prohibited.""" - def explain = "" - } - - class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExtractorID) { - def msg = em"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method" - def explain = - em"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow: - | - If it is just a test, return a ${hl("Boolean")}. For example ${hl("case even()")} - | - If it returns a single sub-value of type T, return an ${hl("Option[T]")} - | - If it returns several sub-values T1,...,Tn, group them in an optional tuple ${hl("Option[(T1,...,Tn)]")} - | - |Sometimes, the number of sub-values isn't fixed and we would like to return a sequence. - |For this reason, you can also define patterns through ${hl("unapplySeq")} which returns ${hl("Option[Seq[T]]")}. 
- |This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""".stripMargin - } - - class MemberWithSameNameAsStatic()(using Context) - extends SyntaxMsg(MemberWithSameNameAsStaticID) { - def msg = em"Companion classes cannot define members with same name as a ${hl("@static")} member" - def explain = "" - } - - class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol)(using Context) - extends Message(PureExpressionInStatementPositionID) { - def kind = MessageKind.PotentialIssue - def msg = "A pure expression does nothing in statement position; you may be omitting necessary parentheses" - def explain = - em"""The pure expression $stat doesn't have any side effect and its result is not assigned elsewhere. - |It can be removed without changing the semantics of the program. This may indicate an error.""".stripMargin - } - - class TraitCompanionWithMutableStatic()(using Context) - extends SyntaxMsg(TraitCompanionWithMutableStaticID) { - def msg = em"Companion of traits cannot define mutable @static fields" - def explain = "" - } - - class LazyStaticField()(using Context) - extends SyntaxMsg(LazyStaticFieldID) { - def msg = em"Lazy @static fields are not supported" - def explain = "" - } - - class StaticOverridingNonStaticMembers()(using Context) - extends SyntaxMsg(StaticOverridingNonStaticMembersID) { - def msg = em"${hl("@static")} members cannot override or implement non-static ones" - def explain = "" - } - - class OverloadInRefinement(rsym: Symbol)(using Context) - extends DeclarationMsg(OverloadInRefinementID) { - def msg = "Refinements cannot introduce overloaded definitions" - def explain = - em"""The refinement `$rsym` introduces an overloaded definition. 
- |Refinements cannot contain overloaded definitions.""".stripMargin - } - - class NoMatchingOverload(val alternatives: List[SingleDenotation], pt: Type)(using Context) - extends TypeMsg(NoMatchingOverloadID) { - def msg = - em"""None of the ${err.overloadedAltsStr(alternatives)} - |match ${err.expectedTypeStr(pt)}""" - def explain = "" - } - class StableIdentPattern(tree: untpd.Tree, pt: Type)(using Context) - extends TypeMsg(StableIdentPatternID) { - def msg = - em"""Stable identifier required, but $tree found""" - def explain = "" - } + val clashDescription = + if (decl.owner eq previousDecl.owner) + "Double definition" + else if ((decl.owner eq base) || (previousDecl eq base)) + "Name clash between defined and inherited member" + else + "Name clash between inherited members" - class IllegalSuperAccessor(base: Symbol, memberName: Name, targetName: Name, - acc: Symbol, accTp: Type, - other: Symbol, otherTp: Type)(using Context) extends DeclarationMsg(IllegalSuperAccessorID) { - def msg = { - // The mixin containing a super-call that requires a super-accessor - val accMixin = acc.owner - // The class or trait that the super-accessor should resolve too in `base` - val otherMixin = other.owner - // The super-call in `accMixin` - val superCall = hl(i"super.$memberName") - // The super-call that the super-accesors in `base` forwards to - val resolvedSuperCall = hl(i"super[${otherMixin.name}].$memberName") - // The super-call that we would have called if `super` in traits behaved like it - // does in classes, i.e. followed the linearization of the trait itself. 
- val staticSuperCall = { - val staticSuper = accMixin.asClass.info.parents.reverse - .find(_.nonPrivateMember(memberName) - .matchingDenotation(accMixin.thisType, acc.info, targetName).exists) - val staticSuperName = staticSuper match { - case Some(parent) => - parent.classSymbol.name.show - case None => // Might be reachable under separate compilation - "SomeParent" - } - hl(i"super[$staticSuperName].$memberName") + atPhase(typerPhase) { + i"""$clashDescription: + |${previousDecl.showDcl} ${symLocation(previousDecl)} and + |${decl.showDcl} ${symLocation(decl)} + |""" + } + details + } + def explain(using Context) = "" +} + +class ImportRenamedTwice(ident: untpd.Ident)(using Context) extends SyntaxMsg(ImportRenamedTwiceID) { + def msg(using Context) = s"${ident.show} is renamed twice on the same import line." + def explain(using Context) = "" +} + +class TypeTestAlwaysDiverges(scrutTp: Type, testTp: Type)(using Context) extends SyntaxMsg(TypeTestAlwaysDivergesID) { + def msg(using Context) = + s"This type test will never return a result since the scrutinee type ${scrutTp.show} does not contain any value." + def explain(using Context) = "" +} + +// Relative of CyclicReferenceInvolvingImplicit and RecursiveValueNeedsResultType +class TermMemberNeedsResultTypeForImplicitSearch(cycleSym: Symbol)(using Context) + extends CyclicMsg(TermMemberNeedsNeedsResultTypeForImplicitSearchID) { + def msg(using Context) = i"""$cycleSym needs result type because its right-hand side attempts implicit search""" + def explain(using Context) = + i"""|The right hand-side of $cycleSym's definition requires an implicit search at the highlighted position. + |To avoid this error, give `$cycleSym` an explicit type. 
+ |""" +} + +class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends SyntaxMsg(ClassCannotExtendEnumID) { + def msg(using Context) = i"""$cls in ${cls.owner} extends enum ${parent.name}, but extending enums is prohibited.""" + def explain(using Context) = "" +} + +class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExtractorID) { + def msg(using Context) = i"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method" + def explain(using Context) = + i"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow: + | - If it is just a test, return a ${hl("Boolean")}. For example ${hl("case even()")} + | - If it returns a single sub-value of type T, return an ${hl("Option[T]")} + | - If it returns several sub-values T1,...,Tn, group them in an optional tuple ${hl("Option[(T1,...,Tn)]")} + | + |Sometimes, the number of sub-values isn't fixed and we would like to return a sequence. + |For this reason, you can also define patterns through ${hl("unapplySeq")} which returns ${hl("Option[Seq[T]]")}. + |This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""" +} + +class MemberWithSameNameAsStatic()(using Context) + extends SyntaxMsg(MemberWithSameNameAsStaticID) { + def msg(using Context) = i"Companion classes cannot define members with same name as a ${hl("@static")} member" + def explain(using Context) = "" +} + +class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol)(using Context) + extends Message(PureExpressionInStatementPositionID) { + def kind = MessageKind.PotentialIssue + def msg(using Context) = "A pure expression does nothing in statement position; you may be omitting necessary parentheses" + def explain(using Context) = + i"""The pure expression $stat doesn't have any side effect and its result is not assigned elsewhere. + |It can be removed without changing the semantics of the program. 
This may indicate an error.""" +} + +class TraitCompanionWithMutableStatic()(using Context) + extends SyntaxMsg(TraitCompanionWithMutableStaticID) { + def msg(using Context) = i"Companion of traits cannot define mutable @static fields" + def explain(using Context) = "" +} + +class LazyStaticField()(using Context) + extends SyntaxMsg(LazyStaticFieldID) { + def msg(using Context) = i"Lazy @static fields are not supported" + def explain(using Context) = "" +} + +class StaticOverridingNonStaticMembers()(using Context) + extends SyntaxMsg(StaticOverridingNonStaticMembersID) { + def msg(using Context) = i"${hl("@static")} members cannot override or implement non-static ones" + def explain(using Context) = "" +} + +class OverloadInRefinement(rsym: Symbol)(using Context) + extends DeclarationMsg(OverloadInRefinementID) { + def msg(using Context) = "Refinements cannot introduce overloaded definitions" + def explain(using Context) = + i"""The refinement `$rsym` introduces an overloaded definition. 
+      |Refinements cannot contain overloaded definitions."""
+}
+
+class NoMatchingOverload(val alternatives: List[SingleDenotation], pt: Type)(using Context)
+  extends TypeMsg(NoMatchingOverloadID) {
+  def msg(using Context) =
+    i"""None of the ${err.overloadedAltsStr(alternatives)}
+       |match ${err.expectedTypeStr(pt)}"""
+  def explain(using Context) = ""
+}
+class StableIdentPattern(tree: untpd.Tree, pt: Type)(using Context)
+  extends TypeMsg(StableIdentPatternID) {
+  def msg(using Context) =
+    i"""Stable identifier required, but $tree found"""
+  def explain(using Context) = ""
+}
+
+class IllegalSuperAccessor(base: Symbol, memberName: Name, targetName: Name,
+  acc: Symbol, accTp: Type,
+  other: Symbol, otherTp: Type)(using Context) extends DeclarationMsg(IllegalSuperAccessorID) {
+  def msg(using Context) = {
+    // The mixin containing a super-call that requires a super-accessor
+    val accMixin = acc.owner
+    // The class or trait that the super-accessor should resolve to in `base`
+    val otherMixin = other.owner
+    // The super-call in `accMixin`
+    val superCall = hl(i"super.$memberName")
+    // The super-call that the super-accessors in `base` forward to
+    val resolvedSuperCall = hl(i"super[${otherMixin.name}].$memberName")
+    // The super-call that we would have called if `super` in traits behaved like it
+    // does in classes, i.e. followed the linearization of the trait itself.
+    val staticSuperCall = {
+      val staticSuper = accMixin.asClass.info.parents.reverse
+        .find(_.nonPrivateMember(memberName)
+          .matchingDenotation(accMixin.thisType, acc.info, targetName).exists)
+      val staticSuperName = staticSuper match {
+        case Some(parent) =>
+          parent.classSymbol.name.show
+        case None => // Might be reachable under separate compilation
+          "SomeParent"
        }
-      ex"""$base cannot be defined due to a conflict between its parents when
-          |implementing a super-accessor for $memberName in $accMixin:
-          |
-          |1. 
One of its parent (${accMixin.name}) contains a call $superCall in its body, - | and when a super-call in a trait is written without an explicit parent - | listed in brackets, it is implemented by a generated super-accessor in - | the class that extends this trait based on the linearization order of - | the class. - |2. Because ${otherMixin.name} comes before ${accMixin.name} in the linearization - | order of ${base.name}, and because ${otherMixin.name} overrides $memberName, - | the super-accessor in ${base.name} is implemented as a call to - | $resolvedSuperCall. - |3. However, - | ${otherTp.widenExpr} (the type of $resolvedSuperCall in ${base.name}) - | is not a subtype of - | ${accTp.widenExpr} (the type of $memberName in $accMixin). - | Hence, the super-accessor that needs to be generated in ${base.name} - | is illegal. - | - |Here are two possible ways to resolve this: - | - |1. Change the linearization order of ${base.name} such that - | ${accMixin.name} comes before ${otherMixin.name}. - |2. Alternatively, replace $superCall in the body of $accMixin by a - | super-call to a specific parent, e.g. $staticSuperCall - |""".stripMargin + hl(i"super[$staticSuperName].$memberName") } - def explain = "" - } + i"""$base cannot be defined due to a conflict between its parents when + |implementing a super-accessor for $memberName in $accMixin: + | + |1. One of its parent (${accMixin.name}) contains a call $superCall in its body, + | and when a super-call in a trait is written without an explicit parent + | listed in brackets, it is implemented by a generated super-accessor in + | the class that extends this trait based on the linearization order of + | the class. + |2. Because ${otherMixin.name} comes before ${accMixin.name} in the linearization + | order of ${base.name}, and because ${otherMixin.name} overrides $memberName, + | the super-accessor in ${base.name} is implemented as a call to + | $resolvedSuperCall. + |3. 
However, + | ${otherTp.widenExpr} (the type of $resolvedSuperCall in ${base.name}) + | is not a subtype of + | ${accTp.widenExpr} (the type of $memberName in $accMixin). + | Hence, the super-accessor that needs to be generated in ${base.name} + | is illegal. + | + |Here are two possible ways to resolve this: + | + |1. Change the linearization order of ${base.name} such that + | ${accMixin.name} comes before ${otherMixin.name}. + |2. Alternatively, replace $superCall in the body of $accMixin by a + | super-call to a specific parent, e.g. $staticSuperCall + |""" + } + def explain(using Context) = "" +} + +class TraitParameterUsedAsParentPrefix(cls: Symbol)(using Context) + extends DeclarationMsg(TraitParameterUsedAsParentPrefixID) { + def msg(using Context) = + s"${cls.show} cannot extend from a parent that is derived via its own parameters" + def explain(using Context) = + i""" + |The parent class/trait that ${cls.show} extends from is obtained from + |the parameter of ${cls.show}. This is disallowed in order to prevent + |outer-related Null Pointer Exceptions in Scala. + | + |In order to fix this issue consider directly extending from the parent rather + |than obtaining it from the parameters of ${cls.show}. + |""" +} + +class UnknownNamedEnclosingClassOrObject(name: TypeName)(using Context) + extends ReferenceMsg(UnknownNamedEnclosingClassOrObjectID) { + def msg(using Context) = + i"""no enclosing class or object is named '${hl(name.show)}'""" + def explain(using Context) = + i""" + |The class or object named '${hl(name.show)}' was used as a visibility + |modifier, but could not be resolved. Make sure that + |'${hl(name.show)}' is not misspelled and has been imported into the + |current scope. + """ + } + +class IllegalCyclicTypeReference(sym: Symbol, where: String, lastChecked: Type)(using Context) + extends CyclicMsg(IllegalCyclicTypeReferenceID) { + def msg(using Context) = + val lastCheckedStr = + try lastChecked.show + catch case ex: CyclicReference => "..." 
+ i"illegal cyclic type reference: ${where} ${hl(lastCheckedStr)} of $sym refers back to the type itself" + def explain(using Context) = "" +} + +class ErasedTypesCanOnlyBeFunctionTypes()(using Context) + extends SyntaxMsg(ErasedTypesCanOnlyBeFunctionTypesID) { + def msg(using Context) = "Types with erased keyword can only be function types `(erased ...) => ...`" + def explain(using Context) = "" +} + +class CaseClassMissingNonImplicitParamList(cdef: untpd.TypeDef)(using Context) + extends SyntaxMsg(CaseClassMissingNonImplicitParamListID) { + def msg(using Context) = + i"""|A ${hl("case class")} must have at least one leading non-implicit parameter list""" + + def explain(using Context) = + i"""|${cdef.name} must have at least one leading non-implicit parameter list, + | if you're aiming to have a case class parametrized only by implicit ones, you should + | add an explicit ${hl("()")} as the first parameter list to ${cdef.name}.""" +} + +class EnumerationsShouldNotBeEmpty(cdef: untpd.TypeDef)(using Context) + extends SyntaxMsg(EnumerationsShouldNotBeEmptyID) { + def msg(using Context) = "Enumerations must contain at least one case" + + def explain(using Context) = + i"""|Enumeration ${cdef.name} must contain at least one case + |Example Usage: + | ${hl("enum")} ${cdef.name} { + | ${hl("case")} Option1, Option2 + | } + |""" +} + +class TypedCaseDoesNotExplicitlyExtendTypedEnum(enumDef: Symbol, caseDef: untpd.TypeDef)(using Context) + extends SyntaxMsg(TypedCaseDoesNotExplicitlyExtendTypedEnumID) { + def msg(using Context) = i"explicit extends clause needed because both enum case and enum class have type parameters" + + def explain(using Context) = + i"""Enumerations where the enum class as well as the enum case have type parameters need + |an explicit extends. 
+ |for example: + | ${hl("enum")} ${enumDef.name}[T] { + | ${hl("case")} ${caseDef.name}[U](u: U) ${hl("extends")} ${enumDef.name}[U] + | } + |""" +} + +class IllegalRedefinitionOfStandardKind(kindType: String, name: Name)(using Context) + extends SyntaxMsg(IllegalRedefinitionOfStandardKindID) { + def msg(using Context) = i"illegal redefinition of standard $kindType $name" + def explain(using Context) = + i"""| "$name" is a standard Scala core `$kindType` + | Please choose a different name to avoid conflicts + |""" +} + +class NoExtensionMethodAllowed(mdef: untpd.DefDef)(using Context) + extends SyntaxMsg(NoExtensionMethodAllowedID) { + def msg(using Context) = i"No extension method allowed here, since collective parameters are given" + def explain(using Context) = + i"""|Extension method: + | `${mdef}` + |is defined inside an extension clause which has collective parameters. + |""" +} - class TraitParameterUsedAsParentPrefix(cls: Symbol)(using Context) - extends DeclarationMsg(TraitParameterUsedAsParentPrefixID) { - def msg = - s"${cls.show} cannot extend from a parent that is derived via its own parameters" - def explain = - ex""" - |The parent class/trait that ${cls.show} extends from is obtained from - |the parameter of ${cls.show}. This is disallowed in order to prevent - |outer-related Null Pointer Exceptions in Scala. - | - |In order to fix this issue consider directly extending from the parent rather - |than obtaining it from the parameters of ${cls.show}. 
- |""".stripMargin - } +class ExtensionMethodCannotHaveTypeParams(mdef: untpd.DefDef)(using Context) + extends SyntaxMsg(ExtensionMethodCannotHaveTypeParamsID) { + def msg(using Context) = i"Extension method cannot have type parameters since some were already given previously" - class UnknownNamedEnclosingClassOrObject(name: TypeName)(using Context) - extends ReferenceMsg(UnknownNamedEnclosingClassOrObjectID) { - def msg = - em"""no enclosing class or object is named '${hl(name.show)}'""" - def explain = - ex""" - |The class or object named '${hl(name.show)}' was used as a visibility - |modifier, but could not be resolved. Make sure that - |'${hl(name.show)}' is not misspelled and has been imported into the - |current scope. - """.stripMargin + def explain(using Context) = + i"""|Extension method: + | `${mdef}` + |has type parameters `[${mdef.leadingTypeParams.map(_.show).mkString(",")}]`, while the extension clause has + |it's own type parameters. Please consider moving these to the extension clause's type parameter list. + |""" +} + +class ExtensionCanOnlyHaveDefs(mdef: untpd.Tree)(using Context) + extends SyntaxMsg(ExtensionCanOnlyHaveDefsID) { + def msg(using Context) = i"Only methods allowed here, since collective parameters are given" + def explain(using Context) = + i"""Extension clauses can only have `def`s + | `${mdef.show}` is not a valid expression here. + |""" +} + +class UnexpectedPatternForSummonFrom(tree: Tree[_])(using Context) + extends SyntaxMsg(UnexpectedPatternForSummonFromID) { + def msg(using Context) = i"Unexpected pattern for summonFrom. Expected ${hl("`x: T`")} or ${hl("`_`")}" + def explain(using Context) = + i"""|The pattern "${tree.show}" provided in the ${hl("case")} expression of the ${hl("summonFrom")}, + | needs to be of the form ${hl("`x: T`")} or ${hl("`_`")}. + | + | Example usage: + | inline def a = summonFrom { + | case x: T => ??? + | } + | + | or + | inline def a = summonFrom { + | case _ => ??? 
+ | } + |""" +} + +class AnonymousInstanceCannotBeEmpty(impl: untpd.Template)(using Context) + extends SyntaxMsg(AnonymousInstanceCannotBeEmptyID) { + def msg(using Context) = i"anonymous instance must implement a type or have at least one extension method" + def explain(using Context) = + i"""|Anonymous instances cannot be defined with an empty body. The block + |`${impl.show}` should either contain an implemented type or at least one extension method. + |""" +} + +class ModifierNotAllowedForDefinition(flag: Flag)(using Context) + extends SyntaxMsg(ModifierNotAllowedForDefinitionID) { + def msg(using Context) = i"Modifier ${hl(flag.flagsString)} is not allowed for this definition" + def explain(using Context) = "" +} + +class RedundantModifier(flag: Flag)(using Context) + extends SyntaxMsg(RedundantModifierID) { + def msg(using Context) = i"Modifier ${hl(flag.flagsString)} is redundant for this definition" + def explain(using Context) = "" +} + +class InvalidReferenceInImplicitNotFoundAnnotation(typeVar: String, owner: String)(using Context) + extends ReferenceMsg(InvalidReferenceInImplicitNotFoundAnnotationID) { + def msg(using Context) = i"""|Invalid reference to a type variable ${hl(typeVar)} found in the annotation argument. + |The variable does not occur as a parameter in the scope of ${hl(owner)}. + |""" + def explain(using Context) = "" +} + +class CaseClassInInlinedCode(tree: tpd.Tree)(using Context) + extends SyntaxMsg(CaseClassInInlinedCodeID) { + + def defKind = if tree.symbol.is(Module) then "object" else "class" + def msg(using Context) = s"Case $defKind definitions are not allowed in inline methods or quoted code. Use a normal $defKind instead." + def explain(using Context) = + i"""Case class/object definitions generate a considerable footprint in code size. + |Inlining such definition would multiply this footprint for each call site. 
+      |"""
+}
+
+class ImplicitSearchTooLargeWarning(limit: Int, openSearchPairs: List[(Candidate, Type)])(using Context)
+  extends TypeMsg(ImplicitSearchTooLargeID):
+  override def showAlways = true
+  def showQuery(query: (Candidate, Type))(using Context): String =
+    i"  ${query._1.ref.symbol.showLocated} for ${query._2}"
+  def msg(using Context) =
+    i"""Implicit search problem too large.
+       |an implicit search was terminated with failure after trying $limit expressions.
+       |The root candidate for the search was:
+       |
+       |${showQuery(openSearchPairs.last)}
+       |
+       |You can change the behavior by setting the `-Ximplicit-search-limit` value.
+       |Smaller values cause the search to fail faster.
+       |Larger values might make a very large search problem succeed.
+       |"""
+  def explain(using Context) =
+    i"""The overflow happened with the following lists of tried expressions and target types,
+       |starting with the root query:
+       |
+       |${openSearchPairs.reverse.map(showQuery)}%\n%
+    """
+
+class TargetNameOnTopLevelClass(symbol: Symbol)(using Context)
+extends SyntaxMsg(TargetNameOnTopLevelClassID):
+  def msg(using Context) = i"${hl("@targetName")} annotation not allowed on top-level $symbol"
+  def explain(using Context) =
+    val annot = symbol.getAnnotation(defn.TargetNameAnnot).get
+    i"""The @targetName annotation may be applied to a top-level ${hl("val")} or ${hl("def")}, but not
+       |a top-level ${hl("class")}, ${hl("trait")}, or ${hl("object")}.
+       |
+       |This restriction is due to the naming convention of Java classfiles, whose filenames
+       |are based on the name of the class defined within. If @targetName were permitted
+       |here, the name of the classfile would be based on the target name, and the compiler
+       |could not associate that classfile with the Scala-visible defined name of the class. 
+ | + |If your use case requires @targetName, consider wrapping $symbol in an ${hl("object")} + |(and possibly exporting it), as in the following example: + | + |${hl("object Wrapper:")} + | $annot $symbol { ... } + | + |${hl("export")} Wrapper.${symbol.name} ${hl("// optional")}""" + +class NotClassType(tp: Type)(using Context) +extends TypeMsg(NotClassTypeID), ShowMatchTrace(tp): + def msg(using Context) = i"$tp is not a class type" + def explain(using Context) = "" + +class MissingImplicitArgument( + arg: tpd.Tree, + pt: Type, + where: String, + paramSymWithMethodCallTree: Option[(Symbol, tpd.Tree)] = None, + ignoredInstanceNormalImport: => Option[SearchSuccess], + ignoredConvertibleImplicits: => Iterable[TermRef] + )(using Context) extends TypeMsg(MissingImplicitArgumentID), ShowMatchTrace(pt): + + arg.tpe match + case ambi: AmbiguousImplicits => withoutDisambiguation() + case _ => + + /** Format `raw` implicitNotFound or implicitAmbiguous argument, replacing + * all occurrences of `${X}` where `X` is in `paramNames` with the + * corresponding shown type in `args`. + */ + def userDefinedErrorString(raw: String, paramNames: List[String], args: List[Type])(using Context): String = + def translate(name: String): Option[String] = + val idx = paramNames.indexOf(name) + if (idx >= 0) Some(i"${args(idx)}") else None + """\$\{\s*([^}\s]+)\s*\}""".r.replaceAllIn(raw, (_: Regex.Match) match + case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("")).nn + ) + + /** @param rawMsg Message template with variables, e.g. 
"Variable A is ${A}" + * @param sym Symbol of the annotated type or of the method whose parameter was annotated + * @param substituteType Function substituting specific types for abstract types associated with variables, e.g A -> Int + */ + def formatAnnotationMessage(rawMsg: String, sym: Symbol, substituteType: Type => Type)(using Context): String = + val substitutableTypesSymbols = substitutableTypeSymbolsInScope(sym) + userDefinedErrorString( + rawMsg, + paramNames = substitutableTypesSymbols.map(_.name.unexpandedName.toString), + args = substitutableTypesSymbols.map(_.typeRef).map(substituteType) + ) + + /** Extract a user defined error message from a symbol `sym` + * with an annotation matching the given class symbol `cls`. + */ + def userDefinedMsg(sym: Symbol, cls: Symbol)(using Context) = + for + ann <- sym.getAnnotation(cls) + msg <- ann.argumentConstantString(0) + yield msg + + def userDefinedImplicitNotFoundTypeMessageFor(sym: Symbol)(using Context): Option[String] = + for + rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) + if Feature.migrateTo3 || sym != defn.Function1 + // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore + yield + val substituteType = (_: Type).asSeenFrom(pt, sym) + formatAnnotationMessage(rawMsg, sym, substituteType) + + /** Extracting the message from a method parameter, e.g. in + * + * trait Foo + * + * def foo(implicit @annotation.implicitNotFound("Foo is missing") foo: Foo): Any = ??? 
+ */ + def userDefinedImplicitNotFoundParamMessage(using Context): Option[String] = + paramSymWithMethodCallTree.flatMap: (sym, applTree) => + userDefinedMsg(sym, defn.ImplicitNotFoundAnnot).map: rawMsg => + val fn = tpd.funPart(applTree) + val targs = tpd.typeArgss(applTree).flatten + val methodOwner = fn.symbol.owner + val methodOwnerType = tpd.qualifier(fn).tpe + val methodTypeParams = fn.symbol.paramSymss.flatten.filter(_.isType) + val methodTypeArgs = targs.map(_.tpe) + val substituteType = (_: Type).asSeenFrom(methodOwnerType, methodOwner).subst(methodTypeParams, methodTypeArgs) + formatAnnotationMessage(rawMsg, sym.owner, substituteType) + + def userDefinedImplicitNotFoundTypeMessage(using Context): Option[String] = + def recur(tp: Type): Option[String] = tp match + case tp: TypeRef => + val sym = tp.symbol + userDefinedImplicitNotFoundTypeMessageFor(sym).orElse(recur(tp.info)) + case tp: ClassInfo => + tp.baseClasses.iterator + .map(userDefinedImplicitNotFoundTypeMessageFor) + .find(_.isDefined).flatten + case tp: TypeProxy => + recur(tp.superType) + case tp: AndType => + recur(tp.tp1).orElse(recur(tp.tp2)) + case _ => + None + recur(pt) + + /** The implicitNotFound annotation on the parameter, or else on the type. + * implicitNotFound message strings starting with `explain=` are intended for + * additional explanations, not the message proper. The leading `explain=` is + * dropped in this case. + * @param explain The message is used for an additional explanation, not + * the message proper. 
+ */ + def userDefinedImplicitNotFoundMessage(explain: Boolean)(using Context): Option[String] = + val explainTag = "explain=" + def filter(msg: Option[String]) = msg match + case Some(str) => + if str.startsWith(explainTag) then + if explain then Some(str.drop(explainTag.length)) else None + else if explain then None + else msg + case None => None + filter(userDefinedImplicitNotFoundParamMessage) + .orElse(filter(userDefinedImplicitNotFoundTypeMessage)) + + object AmbiguousImplicitMsg { + def unapply(search: SearchSuccess): Option[String] = + userDefinedMsg(search.ref.symbol, defn.ImplicitAmbiguousAnnot) + } + + def msg(using Context): String = + + def formatMsg(shortForm: String)(headline: String = shortForm) = arg match + case arg: Trees.SearchFailureIdent[?] => + arg.tpe match + case _: NoMatchingImplicits => headline + case tpe: SearchFailureType => + i"$headline. ${tpe.explanation}" + case _ => headline + case _ => + arg.tpe match + case tpe: SearchFailureType => + val original = arg match + case Inlined(call, _, _) => call + case _ => arg + i"""$headline. + |I found: + | + | ${original.show.replace("\n", "\n ")} + | + |But ${tpe.explanation}.""" + case _ => headline + + def location(preposition: String) = if (where.isEmpty) "" else s" $preposition $where" + + def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = + s"Ambiguous given instances: ${ambi.explanation}${location("of")}" + + def defaultImplicitNotFoundMessage = + i"No given instance of type $pt was found${location("for")}" + + /** Construct a custom error message given an ambiguous implicit + * candidate `alt` and a user defined message `raw`. 
+ */ + def userDefinedAmbiguousImplicitMsg(alt: SearchSuccess, raw: String) = { + val params = alt.ref.underlying match { + case p: PolyType => p.paramNames.map(_.toString) + case _ => Nil + } + def resolveTypes(targs: List[tpd.Tree])(using Context) = + targs.map(a => Inferencing.fullyDefinedType(a.tpe, "type argument", a.srcPos)) + + // We can extract type arguments from: + // - a function call: + // @implicitAmbiguous("msg A=${A}") + // implicit def f[A](): String = ... + // implicitly[String] // found: f[Any]() + // + // - an eta-expanded function: + // @implicitAmbiguous("msg A=${A}") + // implicit def f[A](x: Int): String = ... + // implicitly[Int => String] // found: x => f[Any](x) + + val call = tpd.closureBody(alt.tree) // the tree itself if not a closure + val targs = tpd.typeArgss(call).flatten + val args = resolveTypes(targs)(using ctx.fresh.setTyperState(alt.tstate)) + userDefinedErrorString(raw, params, args) } - class IllegalCyclicTypeReference(sym: Symbol, where: String, lastChecked: Type)(using Context) - extends CyclicMsg(IllegalCyclicTypeReferenceID) { - def msg = - val lastCheckedStr = - try lastChecked.show - catch case ex: CyclicReference => "..." - i"illegal cyclic type reference: ${where} ${hl(lastCheckedStr)} of $sym refers back to the type itself" - def explain = "" - } - - class ErasedTypesCanOnlyBeFunctionTypes()(using Context) - extends SyntaxMsg(ErasedTypesCanOnlyBeFunctionTypesID) { - def msg = "Types with erased keyword can only be function types `(erased ...) 
=> ...`" - def explain = "" - } - - class CaseClassMissingNonImplicitParamList(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(CaseClassMissingNonImplicitParamListID) { - def msg = - em"""|A ${hl("case class")} must have at least one leading non-implicit parameter list""" - - def explain = - em"""|${cdef.name} must have at least one leading non-implicit parameter list, - | if you're aiming to have a case class parametrized only by implicit ones, you should - | add an explicit ${hl("()")} as the first parameter list to ${cdef.name}.""".stripMargin - } - - class EnumerationsShouldNotBeEmpty(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(EnumerationsShouldNotBeEmptyID) { - def msg = "Enumerations must contain at least one case" - - def explain = - em"""|Enumeration ${cdef.name} must contain at least one case - |Example Usage: - | ${hl("enum")} ${cdef.name} { - | ${hl("case")} Option1, Option2 - | } - |""".stripMargin - } - - class TypedCaseDoesNotExplicitlyExtendTypedEnum(enumDef: Symbol, caseDef: untpd.TypeDef)(using Context) - extends SyntaxMsg(TypedCaseDoesNotExplicitlyExtendTypedEnumID) { - def msg = i"explicit extends clause needed because both enum case and enum class have type parameters" - - def explain = - em"""Enumerations where the enum class as well as the enum case have type parameters need - |an explicit extends. 
- |for example: - | ${hl("enum")} ${enumDef.name}[T] { - | ${hl("case")} ${caseDef.name}[U](u: U) ${hl("extends")} ${enumDef.name}[U] - | } - |""".stripMargin - } - - class IllegalRedefinitionOfStandardKind(kindType: String, name: Name)(using Context) - extends SyntaxMsg(IllegalRedefinitionOfStandardKindID) { - def msg = em"illegal redefinition of standard $kindType $name" - def explain = - em"""| "$name" is a standard Scala core `$kindType` - | Please choose a different name to avoid conflicts - |""".stripMargin - } - - class NoExtensionMethodAllowed(mdef: untpd.DefDef)(using Context) - extends SyntaxMsg(NoExtensionMethodAllowedID) { - def msg = em"No extension method allowed here, since collective parameters are given" - def explain = - em"""|Extension method: - | `${mdef}` - |is defined inside an extension clause which has collective parameters. - |""".stripMargin - } - - class ExtensionMethodCannotHaveTypeParams(mdef: untpd.DefDef)(using Context) - extends SyntaxMsg(ExtensionMethodCannotHaveTypeParamsID) { - def msg = i"Extension method cannot have type parameters since some were already given previously" - - def explain = - em"""|Extension method: - | `${mdef}` - |has type parameters `[${mdef.leadingTypeParams.map(_.show).mkString(",")}]`, while the extension clause has - |it's own type parameters. Please consider moving these to the extension clause's type parameter list. - |""".stripMargin - } - - class ExtensionCanOnlyHaveDefs(mdef: untpd.Tree)(using Context) - extends SyntaxMsg(ExtensionCanOnlyHaveDefsID) { - def msg = em"Only methods allowed here, since collective parameters are given" - def explain = - em"""Extension clauses can only have `def`s - | `${mdef.show}` is not a valid expression here. - |""".stripMargin - } - - class UnexpectedPatternForSummonFrom(tree: Tree[_])(using Context) - extends SyntaxMsg(UnexpectedPatternForSummonFromID) { - def msg = em"Unexpected pattern for summonFrom. 
Expected ${hl("`x: T`")} or ${hl("`_`")}" - def explain = - em"""|The pattern "${tree.show}" provided in the ${hl("case")} expression of the ${hl("summonFrom")}, - | needs to be of the form ${hl("`x: T`")} or ${hl("`_`")}. - | - | Example usage: - | inline def a = summonFrom { - | case x: T => ??? - | } - | - | or - | inline def a = summonFrom { - | case _ => ??? - | } - |""".stripMargin - } - - class AnonymousInstanceCannotBeEmpty(impl: untpd.Template)(using Context) - extends SyntaxMsg(AnonymousInstanceCannotBeEmptyID) { - def msg = i"anonymous instance must implement a type or have at least one extension method" - def explain = - em"""|Anonymous instances cannot be defined with an empty body. The block - |`${impl.show}` should either contain an implemented type or at least one extension method. - |""".stripMargin - } - - class ModifierNotAllowedForDefinition(flag: Flag)(using Context) - extends SyntaxMsg(ModifierNotAllowedForDefinitionID) { - def msg = em"Modifier ${hl(flag.flagsString)} is not allowed for this definition" - def explain = "" - } - - class RedundantModifier(flag: Flag)(using Context) - extends SyntaxMsg(RedundantModifierID) { - def msg = em"Modifier ${hl(flag.flagsString)} is redundant for this definition" - def explain = "" - } - - class InvalidReferenceInImplicitNotFoundAnnotation(typeVar: String, owner: String)(using Context) - extends ReferenceMsg(InvalidReferenceInImplicitNotFoundAnnotationID) { - def msg = em"""|Invalid reference to a type variable ${hl(typeVar)} found in the annotation argument. - |The variable does not occur as a parameter in the scope of ${hl(owner)}. - |""".stripMargin - def explain = "" - } - - class CaseClassInInlinedCode(tree: tpd.Tree)(using Context) - extends SyntaxMsg(CaseClassInInlinedCodeID) { - - def defKind = if tree.symbol.is(Module) then "object" else "class" - def msg = s"Case $defKind definitions are not allowed in inline methods or quoted code. Use a normal $defKind instead." 
- def explain = - em"""Case class/object definitions generate a considerable footprint in code size. - |Inlining such definition would multiply this footprint for each call site. - |""".stripMargin - } - - class ImplicitSearchTooLargeWarning(limit: Int, openSearchPairs: List[(Candidate, Type)])(using Context) - extends TypeMsg(ImplicitSearchTooLargeID): - override def showAlways = true - def showQuery(query: (Candidate, Type)): String = - i" ${query._1.ref.symbol.showLocated} for ${query._2}}" - def msg = - em"""Implicit search problem too large. - |an implicit search was terminated with failure after trying $limit expressions. - |The root candidate for the search was: - | - |${showQuery(openSearchPairs.last)} - | - |You can change the behavior by setting the `-Ximplicit-search-limit` value. - |Smaller values cause the search to fail faster. - |Larger values might make a very large search problem succeed. - |""" - def explain = - em"""The overflow happened with the following lists of tried expressions and target types, - |starting with the root query: - | - |${openSearchPairs.reverse.map(showQuery)}%\n% - """ - - class TargetNameOnTopLevelClass(symbol: Symbol)(using Context) - extends SyntaxMsg(TargetNameOnTopLevelClassID): - def msg = em"${hl("@targetName")} annotation not allowed on top-level $symbol" - def explain = - val annot = symbol.getAnnotation(defn.TargetNameAnnot).get - em"""The @targetName annotation may be applied to a top-level ${hl("val")} or ${hl("def")}, but not - |a top-level ${hl("class")}, ${hl("trait")}, or ${hl("object")}. - | - |This restriction is due to the naming convention of Java classfiles, whose filenames - |are based on the name of the class defined within. If @targetName were permitted - |here, the name of the classfile would be based on the target name, and the compiler - |could not associate that classfile with the Scala-visible defined name of the class. 
- | - |If your use case requires @targetName, consider wrapping $symbol in an ${hl("object")} - |(and possibly exporting it), as in the following example: - | - |${hl("object Wrapper:")} - | $annot $symbol { ... } - | - |${hl("export")} Wrapper.${symbol.name} ${hl("// optional")}""" + /** Extracting the message from a type, e.g. in + * + * @annotation.implicitNotFound("Foo is missing") + * trait Foo + * + * def foo(implicit foo: Foo): Any = ??? + */ + arg.tpe match + case ambi: AmbiguousImplicits => + (ambi.alt1, ambi.alt2) match + case (alt @ AmbiguousImplicitMsg(msg), _) => + userDefinedAmbiguousImplicitMsg(alt, msg) + case (_, alt @ AmbiguousImplicitMsg(msg)) => + userDefinedAmbiguousImplicitMsg(alt, msg) + case _ => + defaultAmbiguousImplicitMsg(ambi) + case ambi @ TooUnspecific(target) => + i"""No implicit search was attempted${location("for")} + |since the expected type $target is not specific enough""" + case _ => + val shortMessage = userDefinedImplicitNotFoundMessage(explain = false) + .getOrElse(defaultImplicitNotFoundMessage) + formatMsg(shortMessage)() + end msg + + override def msgPostscript(using Context) = + arg.tpe match + case _: AmbiguousImplicits => + "" // show no disambiguation + case _: TooUnspecific => + super.msgPostscript // show just disambigutation and match type trace + case _ => + // show all available additional info + def hiddenImplicitNote(s: SearchSuccess) = + i"\n\nNote: ${s.ref.symbol.showLocated} was not considered because it was not imported with `import given`." + def showImplicitAndConversions(imp: TermRef, convs: Iterable[TermRef]) = + i"\n- ${imp.symbol.showDcl}${convs.map(c => "\n - " + c.symbol.showDcl).mkString}" + def noChainConversionsNote(ignoredConvertibleImplicits: Iterable[TermRef]): Option[String] = + Option.when(ignoredConvertibleImplicits.nonEmpty)( + i"\n\nNote: implicit conversions are not automatically applied to arguments of using clauses. 
" + + i"You will have to pass the argument explicitly.\n" + + i"The following implicits in scope can be implicitly converted to ${pt.show}:" + + ignoredConvertibleImplicits.map { imp => s"\n- ${imp.symbol.showDcl}"}.mkString + ) + super.msgPostscript + ++ ignoredInstanceNormalImport.map(hiddenImplicitNote) + .orElse(noChainConversionsNote(ignoredConvertibleImplicits)) + .getOrElse(ctx.typer.importSuggestionAddendum(pt)) + + def explain(using Context) = userDefinedImplicitNotFoundMessage(explain = true) + .getOrElse("") +end MissingImplicitArgument + +class CannotBeAccessed(tpe: NamedType, superAccess: Boolean)(using Context) +extends ReferenceMsg(CannotBeAccessedID): + def msg(using Context) = + val pre = tpe.prefix + val name = tpe.name + val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists) + val whatCanNot = alts match + case Nil => + i"$name cannot" + case sym :: Nil => + i"${if (sym.owner == pre.typeSymbol) sym.show else sym.showLocated} cannot" + case _ => + i"none of the overloaded alternatives named $name can" + val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else "" + val whyNot = new StringBuffer + alts.foreach(_.isAccessibleFrom(pre, superAccess, whyNot)) + i"$whatCanNot be accessed as a member of $pre$where.$whyNot" + def explain(using Context) = "" + +class InlineGivenShouldNotBeFunction()(using Context) +extends SyntaxMsg(InlineGivenShouldNotBeFunctionID): + def msg(using Context) = + i"""An inline given alias with a function value as right-hand side can significantly increase + |generated code size. You should either drop the `inline` or rewrite the given with an + |explicit `apply` method.""" + def explain(using Context) = + i"""A function value on the right-hand side of an inline given alias expands to + |an anonymous class. Each application of the inline given will then create a + |fresh copy of that class, which can increase code size in surprising ways. 
+ |For that reason, functions are discouraged as right hand sides of inline given aliases. + |You should either drop `inline` or rewrite to an explicit `apply` method. E.g. + | + | inline given Conversion[A, B] = x => x.toB + | + |should be re-formulated as + | + | given Conversion[A, B] with + | inline def apply(x: A) = x.toB + """ + +class ValueDiscarding(tp: Type)(using Context) + extends Message(ValueDiscardingID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"discarded non-Unit value of type $tp" + def explain(using Context) = "" - class NotClassType(tp: Type)(using Context) - extends TypeMsg(NotClassTypeID), ShowMatchTrace(tp): - def msg = ex"$tp is not a class type" - def explain = "" +class UnusedNonUnitValue(tp: Type)(using Context) + extends Message(UnusedNonUnitValueID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"unused value of type $tp" + def explain(using Context) = "" diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index 7c114b51ed21..8e8d3efb8b40 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -4,10 +4,11 @@ package reporting import scala.language.unsafeNulls -import core.Contexts._ -import config.Config -import config.Printers -import core.Mode +import core.*, Contexts.*, Decorators.* +import config.* +import printing.Formatting.* + +import scala.compiletime.* /** Exposes the {{{ trace("question") { op } }}} syntax. 
* @@ -51,9 +52,20 @@ trait TraceSyntax: else op inline def apply[T](inline question: String, inline printer: Printers.Printer, inline show: Boolean)(inline op: T)(using Context): T = - inline if isEnabled then - doTrace[T](question, printer, if show then showShowable(_) else alwaysToString)(op) - else op + apply(question, printer, { + val showOp: T => String = inline if show == true then + val showT = summonInline[Show[T]] + { + given Show[T] = showT + t => i"$t" + } + else + summonFrom { + case given Show[T] => t => i"$t" + case _ => alwaysToString + } + showOp + })(op) inline def apply[T](inline question: String, inline printer: Printers.Printer)(inline op: T)(using Context): T = apply[T](question, printer, false)(op) @@ -64,15 +76,11 @@ trait TraceSyntax: inline def apply[T](inline question: String)(inline op: T)(using Context): T = apply[T](question, false)(op) - private def showShowable(x: Any)(using Context) = x match - case x: printing.Showable => x.show - case _ => String.valueOf(x) - private val alwaysToString = (x: Any) => String.valueOf(x) private def doTrace[T](question: => String, printer: Printers.Printer = Printers.default, - showOp: T => String = alwaysToString) + showOp: T => String) (op: => T)(using Context): T = if ctx.mode.is(Mode.Printing) || !isForced && (printer eq Printers.noPrinter) then op else diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index e561b26abf6d..f54baeb7256c 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -737,8 +737,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { var h = initHash p match - case p: WithLazyField[?] 
=> - p.forceIfLazy + case p: WithLazyFields => p.forceFields() case _ => if inlineOrigin.exists then diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index f7b15dc21eb0..fe5c8d061c78 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -143,34 +143,7 @@ class ExtractDependencies extends Phase { def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance if (depFile.extension == "class") { // Dependency is external -- source is undefined - - // The fully qualified name on the JVM of the class corresponding to `dep.to` - val binaryClassName = { - val builder = new StringBuilder - val pkg = dep.to.enclosingPackageClass - if (!pkg.isEffectiveRoot) { - builder.append(pkg.fullName.mangledString) - builder.append(".") - } - val flatName = dep.to.flatName - // Some companion objects are fake (that is, they're a compiler fiction - // that doesn't correspond to a class that exists at runtime), this - // can happen in two cases: - // - If a Java class has static members. - // - If we create constructor proxies for a class (see NamerOps#addConstructorProxies). - // - // In both cases it's vital that we don't send the object name to - // zinc: when sbt is restarted, zinc will inspect the binary - // dependencies to see if they're still on the classpath, if it - // doesn't find them it will invalidate whatever referenced them, so - // any reference to a fake companion will lead to extra recompilations. - // Instead, use the class name since it's guaranteed to exist at runtime. 
- val clsFlatName = if (dep.to.isOneOf(JavaDefined | ConstructorProxy)) flatName.stripModuleClassSuffix else flatName - builder.append(clsFlatName.mangledString) - builder.toString - } - - processExternalDependency(depFile, binaryClassName) + processExternalDependency(depFile, dep.to.binaryClassName) } else if (allowLocal || depFile.file != sourceFile) { // We cannot ignore dependencies coming from the same source file because // the dependency info needs to propagate. See source-dependencies/trait-trait-211. @@ -190,7 +163,7 @@ object ExtractDependencies { /** Report an internal error in incremental compilation. */ def internalError(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = - report.error(s"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) + report.error(em"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) } private case class ClassDependency(from: Symbol, to: Symbol, context: DependencyContext) @@ -333,6 +306,13 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT } } + private def addInheritanceDependencies(tree: Closure)(using Context): Unit = + // If the tpt is empty, this is a non-SAM lambda, so no need to register + // an inheritance relationship. 
+ if !tree.tpt.isEmpty then + val from = resolveDependencySource + _dependencies += ClassDependency(from, tree.tpt.tpe.classSymbol, LocalDependencyByInheritance) + private def addInheritanceDependencies(tree: Template)(using Context): Unit = if (tree.parents.nonEmpty) { val depContext = depContextOf(tree.symbol.owner) @@ -396,6 +376,8 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT case ref: RefTree => addMemberRefDependency(ref.symbol) addTypeDependency(ref.tpe) + case t: Closure => + addInheritanceDependencies(t) case t: Template => addInheritanceDependencies(t) case _ => diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 071efb1fb91c..91614aaccad2 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -24,6 +24,7 @@ import scala.annotation.{ threadUnsafe => tu, tailrec } import scala.PartialFunction.condOpt import dotty.tools.dotc.{semanticdb => s} +import dotty.tools.io.{AbstractFile, JarArchive} /** Extract symbol references and uses to semanticdb files. 
* See https://scalameta.org/docs/semanticdb/specification.html#symbol-1 @@ -38,7 +39,9 @@ class ExtractSemanticDB extends Phase: override val description: String = ExtractSemanticDB.description override def isRunnable(using Context) = - super.isRunnable && ctx.settings.Xsemanticdb.value + import ExtractSemanticDB.{semanticdbTarget, outputDirectory} + def writesToOutputJar = semanticdbTarget.isEmpty && outputDirectory.isInstanceOf[JarArchive] + super.isRunnable && ctx.settings.Xsemanticdb.value && !writesToOutputJar // Check not needed since it does not transform trees override def isCheckable: Boolean = false @@ -187,7 +190,7 @@ class ExtractSemanticDB extends Phase: registerUseGuarded(None, privateWithin, spanOfSymbol(privateWithin, tree.span, tree.source), tree.source) else if !excludeSymbol(tree.symbol) then registerSymbol(tree.symbol, symbolKinds(tree)) - case tree: Template if tree.symbol.owner.is(Invisible) => + case tree: Template if tree.symbol != NoSymbol && tree.symbol.owner.is(Invisible) => // do nothing // exclude the symbols and synthetics generated by @main annotation // (main class generated by @main has `Invisible` flag, see `MainProxies.scala`). 
@@ -198,7 +201,7 @@ class ExtractSemanticDB extends Phase: val selfSpan = tree.self.span if selfSpan.exists && selfSpan.hasLength then traverse(tree.self) - if tree.symbol.owner.isEnumClass then + if tree.symbol != NoSymbol && tree.symbol.owner.isEnumClass then tree.body.foreachUntilImport(traverse).foreach(traverse) // the first import statement else tree.body.foreach(traverse) @@ -475,6 +478,13 @@ object ExtractSemanticDB: val name: String = "extractSemanticDB" val description: String = "extract info into .semanticdb files" + private def semanticdbTarget(using Context): Option[Path] = + Option(ctx.settings.semanticdbTarget.value) + .filterNot(_.isEmpty) + .map(Paths.get(_)) + + private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value + def write( source: SourceFile, occurrences: List[SymbolOccurrence], @@ -482,14 +492,8 @@ object ExtractSemanticDB: synthetics: List[Synthetic], )(using Context): Unit = def absolutePath(path: Path): Path = path.toAbsolutePath.normalize - val semanticdbTarget = - val semanticdbTargetSetting = ctx.settings.semanticdbTarget.value - absolutePath( - if semanticdbTargetSetting.isEmpty then ctx.settings.outputDir.value.jpath - else Paths.get(semanticdbTargetSetting) - ) val relPath = SourceFile.relativePath(source, ctx.settings.sourceroot.value) - val outpath = semanticdbTarget + val outpath = absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath)) .resolve("META-INF") .resolve("semanticdb") .resolve(relPath) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala index c825032373f8..c7b0dfd437db 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala @@ -74,7 +74,9 @@ class SemanticSymbolBuilder: def addOwner(owner: Symbol): Unit = if !owner.isRoot then addSymName(b, owner) - def addOverloadIdx(sym: 
Symbol): Unit = + def addOverloadIdx(initSym: Symbol): Unit = + // revert from the compiler-generated overload of the signature polymorphic method + val sym = initSym.originalSignaturePolymorphic.symbol.orElse(initSym) val decls = val decls0 = sym.owner.info.decls.lookupAll(sym.name) if sym.owner.isAllOf(JavaModule) then diff --git a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala new file mode 100644 index 000000000000..98e060488f43 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala @@ -0,0 +1,246 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.ast.{tpd, untpd} +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.NameKinds._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.QuoteContext.* +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.QuoteTypeTags.* +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.util.SrcPos + +/** Checks that staging level consistency holds and heals staged types . + * + * Local term references are level consistent if and only if they are used at the same level as their definition. + * + * Local type references can be used at the level of their definition or lower. If used used at a higher level, + * it will be healed if possible, otherwise it is inconsistent. + * + * Type healing consists in transforming a level inconsistent type `T` into `summon[Type[T]].Underlying`. + * + * As references to types do not necessarily have an associated tree it is not always possible to replace the types directly. 
+ * Instead we always generate a type alias for it and place it at the start of the surrounding quote. This also avoids duplication. + * For example: + * '{ + * val x: List[T] = List[T]() + * () + * } + * + * is transformed to + * + * '{ + * type t$1 = summon[Type[T]].Underlying + * val x: List[t$1] = List[t$1](); + * () + * } + * + */ +class CrossStageSafety extends TreeMapWithStages { + import tpd._ + + private val InAnnotation = Property.Key[Unit]() + + override def transform(tree: Tree)(using Context): Tree = + if (tree.source != ctx.source && tree.source.exists) + transform(tree)(using ctx.withSource(tree.source)) + else if !isInQuoteOrSplice then + checkAnnotations(tree) + super.transform(tree) + else tree match { + case _: TypeTree => + val tp1 = transformTypeAnnotationSplices(tree.tpe) + val healedType = healType(tree.srcPos)(tp1) + if healedType == tree.tpe then tree + else TypeTree(healedType).withSpan(tree.span) + case _: RefTree if tree.isType => + val healedType = healType(tree.srcPos)(tree.tpe) + if healedType == tree.tpe then tree + else TypeTree(healedType).withSpan(tree.span) + case tree: Ident if isWildcardArg(tree) => + tree.withType(healType(tree.srcPos)(tree.tpe)) + case tree: Ident => // this is a term Ident + checkLevelConsistency(tree) + tree + case tree: This => + checkLevelConsistency(tree) + tree + case _: AppliedTypeTree => + super.transform(tree) match + case tree1: AppliedTypeTree if tree1 ne tree => + // propagate healed types + tree1.withType(tree1.tpt.tpe.appliedTo(tree1.args.map(_.tpe))) + case tree1 => tree1 + case tree: DefDef if tree.symbol.is(Inline) && level > 0 => + EmptyTree // Remove inline defs in quoted code. Already fully inlined. 
+ case tree: ValOrDefDef => + checkAnnotations(tree) + healInfo(tree, tree.tpt.srcPos) + super.transform(tree) + case tree: Bind => + checkAnnotations(tree) + healInfo(tree, tree.srcPos) + super.transform(tree) + case tree: UnApply => + super.transform(tree).withType(healType(tree.srcPos)(tree.tpe)) + case tree: TypeDef if tree.symbol.is(Case) && level > 0 => + report.error(reporting.CaseClassInInlinedCode(tree), tree) + super.transform(tree) + case _ => + super.transform(tree) + } + + /** Transform quoted trees while maintaining level correctness */ + override protected def transformQuotation(body: Tree, quote: Apply)(using Context): Tree = { + val taggedTypes = new QuoteTypeTags(quote.span) + + if (ctx.property(InAnnotation).isDefined) + report.error("Cannot have a quote in an annotation", quote.srcPos) + + val stripAnnotsDeep: TypeMap = new TypeMap: + def apply(tp: Type): Type = mapOver(tp.stripAnnots) + + def transformBody() = + val contextWithQuote = + if level == 0 then contextWithQuoteTypeTags(taggedTypes)(using quoteContext) + else quoteContext + val transformedBody = transform(body)(using contextWithQuote) + taggedTypes.getTypeTags match + case Nil => transformedBody + case tags => tpd.Block(tags, transformedBody).withSpan(body.span) + + if body.isTerm then + val transformedBody = transformBody() + // `quoted.runtime.Expr.quote[T]()` --> `quoted.runtime.Expr.quote[T2]()` + val TypeApply(fun, targs) = quote.fun: @unchecked + val targs2 = targs.map(targ => TypeTree(healType(quote.fun.srcPos)(stripAnnotsDeep(targ.tpe)))) + cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, targs2), transformedBody :: Nil) + else + body.tpe match + case DirectTypeOf(termRef) => + // Optimization: `quoted.Type.of[x.Underlying](quotes)` --> `x` + ref(termRef).withSpan(quote.span) + case _ => + transformBody() match + case DirectTypeOf.Healed(termRef) => + // Optimization: `quoted.Type.of[@SplicedType type T = x.Underlying; T](quotes)` --> `x` + ref(termRef).withSpan(quote.span) + 
case transformedBody => + val quotes = quote.args.mapConserve(transform) + // `quoted.Type.of[](quotes)` --> `quoted.Type.of[](quotes)` + val TypeApply(fun, _) = quote.fun: @unchecked + cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, transformedBody :: Nil), quotes) + + } + + /** Transform splice + * - If inside a quote, transform the contents of the splice. + * - If inside inlined code, expand the macro code. + * - If inside of a macro definition, check the validity of the macro. + */ + protected def transformSplice(body: Tree, splice: Apply)(using Context): Tree = { + val body1 = transform(body)(using spliceContext) + splice.fun match { + case fun @ TypeApply(_, _ :: Nil) => + // Type of the splice itself must also be healed + // `quoted.runtime.Expr.quote[F[T]](... T ...)` --> `internal.Quoted.expr[F[$t]](... T ...)` + val tp = healType(splice.srcPos)(splice.tpe.widenTermRefExpr) + cpy.Apply(splice)(cpy.TypeApply(fun)(fun.fun, tpd.TypeTree(tp) :: Nil), body1 :: Nil) + case f @ Apply(fun @ TypeApply(_, _), quotes :: Nil) => + // Type of the splice itself must also be healed + // `quoted.runtime.Expr.quote[F[T]](... T ...)` --> `internal.Quoted.expr[F[$t]](... 
T ...)` + val tp = healType(splice.srcPos)(splice.tpe.widenTermRefExpr) + cpy.Apply(splice)(cpy.Apply(f)(cpy.TypeApply(fun)(fun.fun, tpd.TypeTree(tp) :: Nil), quotes :: Nil), body1 :: Nil) + } + } + + protected def transformSpliceType(body: Tree, splice: Select)(using Context): Tree = { + val body1 = transform(body)(using spliceContext) + if ctx.reporter.hasErrors then + splice + else + val tagRef = getQuoteTypeTags.getTagRef(splice.qualifier.tpe.asInstanceOf[TermRef]) + ref(tagRef).withSpan(splice.span) + } + + def transformTypeAnnotationSplices(tp: Type)(using Context) = new TypeMap { + def apply(tp: Type): Type = tp match + case tp: AnnotatedType => + val newAnnotTree = transform(tp.annot.tree) + derivedAnnotatedType(tp, apply(tp.parent), tp.annot.derivedAnnotation(newAnnotTree)) + case _ => + mapOver(tp) + }.apply(tp) + + /** Check that annotations do not contain quotes and and that splices are valid */ + private def checkAnnotations(tree: Tree)(using Context): Unit = + tree match + case tree: DefTree => + lazy val annotCtx = ctx.fresh.setProperty(InAnnotation, true).withOwner(tree.symbol) + for (annot <- tree.symbol.annotations) annot match + case annot: BodyAnnotation => annot // already checked in PrepareInlineable before the creation of the BodyAnnotation + case annot => transform(annot.tree)(using annotCtx) + case _ => + + /** Heal types in the info of the given tree */ + private def healInfo(tree: Tree, pos: SrcPos)(using Context): Unit = + tree.symbol.info = healType(pos)(tree.symbol.info) + + /** If the type refers to a locally defined symbol (either directly, or in a pickled type), + * check that its staging level matches the current level. + * - Static types and term are allowed at any level. + * - If a type reference is used a higher level, then it is inconsistent. + * Will attempt to heal before failing. + * - If a term reference is used a higher level, then it is inconsistent. 
+ * It cannot be healed because the term will not exist in any future stage. + * + * If `T` is a reference to a type at the wrong level, try to heal it by replacing it with + * a type tag of type `quoted.Type[T]`. + * The tag is generated by an instance of `QuoteTypeTags` directly if the splice is explicit + * or indirectly by `tryHeal`. + */ + protected def healType(pos: SrcPos)(using Context) = + new HealType(pos) + + /** Check level consistency of terms references */ + private def checkLevelConsistency(tree: Ident | This)(using Context): Unit = + new TypeTraverser { + def traverse(tp: Type): Unit = + tp match + case tp @ TermRef(NoPrefix, _) if !tp.symbol.isStatic && level != levelOf(tp.symbol) => + levelError(tp.symbol, tp, tree.srcPos) + case tp: ThisType if level != -1 && level != levelOf(tp.cls) => + levelError(tp.cls, tp, tree.srcPos) + case tp: AnnotatedType => + traverse(tp.parent) + case _ if tp.typeSymbol.is(Package) => + // OK + case _ => + traverseChildren(tp) + }.traverse(tree.tpe) + + private def levelError(sym: Symbol, tp: Type, pos: SrcPos)(using Context): tp.type = { + def symStr = + if (!tp.isInstanceOf[ThisType]) sym.show + else if (sym.is(ModuleClass)) sym.sourceModule.show + else i"${sym.name}.this" + val hint = + if sym.is(Inline) && levelOf(sym) < level then + "\n\n" + + "Hint: Staged references to inline definition in quotes are only inlined after the quote is spliced into level 0 code by a macro. " + + "Try moving this inline definition in a statically accessible location such as an object (this definition can be private)." 
+ else "" + report.error( + em"""access to $symStr from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level $level.$hint""", pos) + tp + } +} diff --git a/compiler/src/dotty/tools/dotc/staging/DirectTypeOf.scala b/compiler/src/dotty/tools/dotc/staging/DirectTypeOf.scala new file mode 100644 index 000000000000..488d8ff2a88e --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/DirectTypeOf.scala @@ -0,0 +1,25 @@ +package dotty.tools.dotc.staging + +import dotty.tools.dotc.ast.{tpd, untpd} +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ + +object DirectTypeOf: + import tpd.* + + /** Matches `x.Underlying` and extracts the TermRef to `x` */ + def unapply(tpe: Type)(using Context): Option[TermRef] = tpe match + case tp @ TypeRef(x: TermRef, _) if tp.symbol == defn.QuotedType_splice => Some(x) + case _ => None + + object Healed: + /** Matches `{ @SplicedType type T = x.Underlying; T }` and extracts the TermRef to `x` */ + def unapply(body: Tree)(using Context): Option[TermRef] = + body match + case Block(List(tdef: TypeDef), tpt: TypeTree) => + tpt.tpe match + case tpe: TypeRef if tpe.typeSymbol == tdef.symbol => + DirectTypeOf.unapply(tdef.rhs.tpe.hiBound) + case _ => None + case _ => None diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala new file mode 100644 index 000000000000..4f59e92241fb --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -0,0 +1,114 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.QuoteContext.* +import dotty.tools.dotc.staging.StagingLevel.* +import 
dotty.tools.dotc.staging.QuoteTypeTags.* +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.typer.Implicits.SearchFailureType +import dotty.tools.dotc.util.SrcPos + +class HealType(pos: SrcPos)(using Context) extends TypeMap { + + /** If the type refers to a locally defined symbol (either directly, or in a pickled type), + * check that its staging level matches the current level. + * - Static types and term are allowed at any level. + * - If a type reference is used a higher level, then it is inconsistent. + * Will attempt to heal before failing. + * - If a term reference is used a higher level, then it is inconsistent. + * It cannot be healed because the term will not exist in any future stage. + * + * If `T` is a reference to a type at the wrong level, try to heal it by replacing it with + * a type tag of type `quoted.Type[T]`. + * The tag is generated by an instance of `QuoteTypeTags` directly if the splice is explicit + * or indirectly by `tryHeal`. + */ + def apply(tp: Type): Type = + tp match + case NonSpliceAlias(aliased) => this.apply(aliased) + case tp: TypeRef => healTypeRef(tp) + case tp: TermRef => + val inconsistentRoot = levelInconsistentRootOfPath(tp) + if inconsistentRoot.exists then levelError(inconsistentRoot, tp, pos) + else tp + case tp: AnnotatedType => + derivedAnnotatedType(tp, apply(tp.parent), tp.annot) + case _ => + mapOver(tp) + + private def healTypeRef(tp: TypeRef): Type = + tp.prefix match + case NoPrefix if tp.typeSymbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) => + tp + case prefix: TermRef if tp.symbol.isTypeSplice => + checkNotWildcardSplice(tp) + if level == 0 then tp else getQuoteTypeTags.getTagRef(prefix) + case _: NamedType | _: ThisType | NoPrefix => + if levelInconsistentRootOfPath(tp).exists then + tryHeal(tp.symbol, tp, pos) + else + tp + case _ => + mapOver(tp) + + private object NonSpliceAlias: + def unapply(tp: TypeRef)(using Context): Option[Type] = tp.underlying match + case 
TypeAlias(alias) if !tp.symbol.isTypeSplice && !tp.typeSymbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) => Some(alias) + case _ => None + + private def checkNotWildcardSplice(splice: TypeRef): Unit = + splice.prefix.termSymbol.info.argInfos match + case (tb: TypeBounds) :: _ => report.error(em"Cannot splice $splice because it is a wildcard type", pos) + case _ => + + /** Return the root of this path if it is a variable defined in a previous level. + * If the path is consistent, return NoSymbol. + */ + private def levelInconsistentRootOfPath(tp: Type)(using Context): Symbol = + tp match + case tp @ NamedType(NoPrefix, _) if level > levelOf(tp.symbol) => tp.symbol + case tp: NamedType if !tp.symbol.isStatic => levelInconsistentRootOfPath(tp.prefix) + case tp: ThisType if level > levelOf(tp.cls) => tp.cls + case _ => NoSymbol + + /** Try to heal reference to type `T` used in a higher level than its definition. + * Returns a reference to a type tag generated by `QuoteTypeTags` that contains a + * reference to a type alias containing the equivalent of `${summon[quoted.Type[T]]}`. + * Emits an error if `T` cannot be healed and returns `T`. + */ + protected def tryHeal(sym: Symbol, tp: TypeRef, pos: SrcPos): Type = { + val reqType = defn.QuotedTypeClass.typeRef.appliedTo(tp) + val tag = ctx.typer.inferImplicitArg(reqType, pos.span) + tag.tpe match + case tp: TermRef => + ctx.typer.checkStable(tp, pos, "type witness") + if levelOf(tp.symbol) > 0 then tp.select(tpnme.Underlying) + else getQuoteTypeTags.getTagRef(tp) + case _: SearchFailureType => + report.error( + ctx.typer.missingArgMsg(tag, reqType, "") + .prepend(i"Reference to $tp within quotes requires a given $reqType in scope.\n") + .append("\n"), + pos) + tp + case _ => + report.error(em"""Reference to $tp within quotes requires a given $reqType in scope. 
+ | + |""", pos) + tp + } + + private def levelError(sym: Symbol, tp: Type, pos: SrcPos): tp.type = { + report.error( + em"""access to $sym from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level $level""", pos) + tp + } +} diff --git a/compiler/src/dotty/tools/dotc/staging/QuoteContext.scala b/compiler/src/dotty/tools/dotc/staging/QuoteContext.scala new file mode 100644 index 000000000000..8e25bba7110c --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/QuoteContext.scala @@ -0,0 +1,34 @@ +package dotty.tools.dotc.staging + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.staging.StagingLevel.* + +object QuoteContext { + + /** A key to be used in a context property that tracks the quotation stack. + * Stack containing the Quotes references received by the surrounding quotes. + */ + private val QuotesStack = new Property.Key[List[tpd.Tree]] + + /** Context with an incremented quotation level and pushes a reference to a Quotes on the quote context stack */ + def pushQuotes(quotes: tpd.Tree)(using Context): Context = + val old = ctx.property(QuotesStack).getOrElse(List.empty) + quoteContext.setProperty(QuotesStack, quotes :: old) + + /** Context with a decremented quotation level and pops the Some of top of the quote context stack or None if the stack is empty. + * The quotation stack could be empty if we are in a top level splice or an erroneous splice directly within a top level splice. 
+ */ + def popQuotes()(using Context): (Option[tpd.Tree], Context) = + val ctx1 = spliceContext + val head = + ctx.property(QuotesStack) match + case Some(x :: xs) => + ctx1.setProperty(QuotesStack, xs) + Some(x) + case _ => + None // Splice at level 0 or lower + (head, ctx1) +} diff --git a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala new file mode 100644 index 000000000000..c4c9c611be3a --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala @@ -0,0 +1,52 @@ +package dotty.tools.dotc.staging + +import dotty.tools.dotc.ast.{tpd, untpd} +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.NameKinds._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.util.Spans._ + +object QuoteTypeTags { + + private val TaggedTypes = new Property.Key[QuoteTypeTags] + + def contextWithQuoteTypeTags(taggedTypes: QuoteTypeTags)(using Context) = + ctx.fresh.setProperty(TaggedTypes, taggedTypes) + + def getQuoteTypeTags(using Context): QuoteTypeTags = + ctx.property(TaggedTypes).get +} + +class QuoteTypeTags(span: Span)(using Context) { + import tpd.* + + private val tags = collection.mutable.LinkedHashMap.empty[Symbol, TypeDef] + + def getTagRef(spliced: TermRef): TypeRef = { + val typeDef = tags.getOrElseUpdate(spliced.symbol, mkTagSymbolAndAssignType(spliced)) + typeDef.symbol.typeRef + } + + def getTypeTags: List[TypeDef] = tags.valuesIterator.toList + + private def mkTagSymbolAndAssignType(spliced: TermRef): TypeDef = { + val splicedTree = tpd.ref(spliced).withSpan(span) + val rhs = splicedTree.select(tpnme.Underlying).withSpan(span) + val alias = 
ctx.typeAssigner.assignType(untpd.TypeBoundsTree(rhs, rhs), rhs, rhs, EmptyTree) + val local = newSymbol( + owner = ctx.owner, + name = UniqueName.fresh((splicedTree.symbol.name.toString + "$_").toTermName).toTypeName, + flags = Synthetic, + info = TypeAlias(splicedTree.tpe.select(tpnme.Underlying)), + coord = span).asType + local.addAnnotation(Annotation(defn.QuotedRuntime_SplicedTypeAnnot, span)) + ctx.typeAssigner.assignType(untpd.TypeDef(local.name, alias), local) + } +} diff --git a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala new file mode 100644 index 000000000000..4704501e38ff --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala @@ -0,0 +1,48 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.util.SrcPos + +import scala.collection.mutable + +object StagingLevel { + + /** A key to be used in a context property that tracks the staging level */ + private val LevelKey = new Property.Key[Int] + + /** A key to be used in a context property that caches the `levelOf` mapping */ + private val LevelOfKey = new Property.Key[Map[Symbol, Int]] + + /** All enclosing calls that are currently inlined, from innermost to outermost. */ + def level(using Context): Int = + ctx.property(LevelKey).getOrElse(0) + + /** Context with an incremented staging level. */ + def quoteContext(using Context): FreshContext = + ctx.fresh.setProperty(LevelKey, level + 1) + + /** Context with a decremented staging level. 
*/ + def spliceContext(using Context): FreshContext = + ctx.fresh.setProperty(LevelKey, level - 1) + + /** The quotation level of the definition of the locally defined symbol */ + def levelOf(sym: Symbol)(using Context): Int = + ctx.property(LevelOfKey) match + case Some(map) => map.getOrElse(sym, 0) + case None => 0 + + /** Context with the current staging level set for the symbols */ + def symbolsInCurrentLevel(syms: List[Symbol])(using Context): Context = + if level == 0 then ctx + else + val levelOfMap = ctx.property(LevelOfKey).getOrElse(Map.empty) + val syms1 = syms//.filter(sym => !levelOfMap.contains(sym)) + val newMap = syms1.foldLeft(levelOfMap)((acc, sym) => acc.updated(sym, level)) + ctx.fresh.setProperty(LevelOfKey, newMap) +} diff --git a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala new file mode 100644 index 000000000000..adaacafa764a --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala @@ -0,0 +1,116 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.ast.{TreeMapWithImplicits, tpd} +import dotty.tools.dotc.config.Printers.staging +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.staging.StagingLevel.* + +import scala.collection.mutable + +/** TreeMap that keeps track of staging levels using StagingLevel. */ +abstract class TreeMapWithStages extends TreeMapWithImplicits { + import tpd._ + + /** If we are inside a quote or a splice */ + private[this] var inQuoteOrSplice = false + + /** If we are inside a quote or a splice */ + protected def isInQuoteOrSplice: Boolean = inQuoteOrSplice + + /** Transform the quote `quote` which contains the quoted `body`. 
+ * + * - `quoted.runtime.Expr.quote[T]()` --> `quoted.runtime.Expr.quote[T]()` + * - `quoted.Type.of[](quotes)` --> `quoted.Type.of[](quotes)` + */ + protected def transformQuotation(body: Tree, quote: Apply)(using Context): Tree = + if body.isTerm then + cpy.Apply(quote)(quote.fun, body :: Nil) + else + val TypeApply(fun, _) = quote.fun: @unchecked + cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, body :: Nil), quote.args) + + /** Transform the expression splice `splice` which contains the spliced `body`. */ + protected def transformSplice(body: Tree, splice: Apply)(using Context): Tree + + /** Transform the type splice `splice` which contains the spliced `body`. */ + protected def transformSpliceType(body: Tree, splice: Select)(using Context): Tree + + override def transform(tree: Tree)(using Context): Tree = + if (tree.source != ctx.source && tree.source.exists) + transform(tree)(using ctx.withSource(tree.source)) + else reporting.trace(i"StagingTransformer.transform $tree at $level", staging, show = true) { + def dropEmptyBlocks(tree: Tree): Tree = tree match { + case Block(Nil, expr) => dropEmptyBlocks(expr) + case _ => tree + } + + tree match { + case Apply(Select(Quoted(quotedTree), _), _) if quotedTree.isType => + dropEmptyBlocks(quotedTree) match + case SplicedType(t) => + // Optimization: `quoted.Type.of[x.Underlying]` --> `x` + transform(t) + case _ => + super.transform(tree) + + case tree @ Quoted(quotedTree) => + val old = inQuoteOrSplice + inQuoteOrSplice = true + try dropEmptyBlocks(quotedTree) match { + case Spliced(t) => + // Optimization: `'{ $x }` --> `x` + // and adapt the refinement of `Quotes { type reflect: ... 
} ?=> Expr[T]` + transform(t).asInstance(tree.tpe) + case _ => transformQuotation(quotedTree, tree) + } + finally inQuoteOrSplice = old + + case tree @ Spliced(splicedTree) => + val old = inQuoteOrSplice + inQuoteOrSplice = true + try dropEmptyBlocks(splicedTree) match { + case Quoted(t) => + // Optimization: `${ 'x }` --> `x` + transform(t) + case _ => transformSplice(splicedTree, tree) + } + finally inQuoteOrSplice = old + + case tree @ SplicedType(splicedTree) => + val old = inQuoteOrSplice + inQuoteOrSplice = true + try transformSpliceType(splicedTree, tree) + finally inQuoteOrSplice = old + + case Block(stats, _) => + val defSyms = stats.collect { case defTree: DefTree => defTree.symbol } + super.transform(tree)(using symbolsInCurrentLevel(defSyms)) + + case CaseDef(pat, guard, body) => + super.transform(tree)(using symbolsInCurrentLevel(tpd.patVars(pat))) + + case (_:Import | _:Export) => + tree + + case _: Template => + val decls = tree.symbol.owner.info.decls.toList + super.transform(tree)(using symbolsInCurrentLevel(decls)) + + case LambdaTypeTree(tparams, body) => + super.transform(tree)(using symbolsInCurrentLevel(tparams.map(_.symbol))) + + case tree: DefTree => + val paramSyms = tree match + case tree: DefDef => tree.paramss.flatten.map(_.symbol) + case _ => Nil + super.transform(tree)(using symbolsInCurrentLevel(tree.symbol :: paramSyms)) + + case _ => + super.transform(tree) + } + } +} diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 5908bce97994..3175ffceae49 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -71,7 +71,7 @@ abstract class AccessProxies { def needsAccessor(sym: Symbol)(using Context): Boolean def ifNoHost(reference: RefTree)(using Context): Tree = { - assert(false, "no host found for $reference with ${reference.symbol.showLocated} from ${ctx.owner}") + 
assert(false, i"no host found for $reference with ${reference.symbol.showLocated} from ${ctx.owner}") reference } @@ -80,6 +80,8 @@ abstract class AccessProxies { val sym = newSymbol(owner, name, Synthetic | Method, info, coord = accessed.span).entered if accessed.is(Private) then sym.setFlag(Final) else if sym.allOverriddenSymbols.exists(!_.is(Deferred)) then sym.setFlag(Override) + if accessed.hasAnnotation(defn.ExperimentalAnnot) then + sym.addAnnotation(defn.ExperimentalAnnot) sym } diff --git a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala index 0d464d319848..0c1f40d4f2bd 100644 --- a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala +++ b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala @@ -5,7 +5,8 @@ import core._ import ast.tpd._ import Annotations._ import Contexts._ -import Symbols.newSymbol +import Symbols.* +import SymUtils.* import Decorators._ import Flags._ import Names._ @@ -23,8 +24,6 @@ class BeanProperties(thisPhase: DenotTransformer): } ::: origBody) def generateAccessors(valDef: ValDef)(using Context): List[Tree] = - import Symbols.defn - def generateGetter(valDef: ValDef, annot: Annotation)(using Context) : Tree = val prefix = if annot matches defn.BooleanBeanPropertyAnnot then "is" else "get" val meth = newSymbol( @@ -34,9 +33,9 @@ class BeanProperties(thisPhase: DenotTransformer): info = MethodType(Nil, valDef.denot.info), coord = annot.tree.span ).enteredAfter(thisPhase).asTerm - meth.addAnnotations(valDef.symbol.annotations) + .withAnnotationsCarrying(valDef.symbol, defn.BeanGetterMetaAnnot) val body: Tree = ref(valDef.symbol) - DefDef(meth, body) + DefDef(meth, body).withSpan(meth.span) def maybeGenerateSetter(valDef: ValDef, annot: Annotation)(using Context): Option[Tree] = Option.when(valDef.denot.asSymDenotation.flags.is(Mutable)) { @@ -48,9 +47,9 @@ class BeanProperties(thisPhase: DenotTransformer): info = MethodType(valDef.name :: 
Nil, valDef.denot.info :: Nil, defn.UnitType), coord = annot.tree.span ).enteredAfter(thisPhase).asTerm - meth.addAnnotations(valDef.symbol.annotations) + .withAnnotationsCarrying(valDef.symbol, defn.BeanSetterMetaAnnot) def body(params: List[List[Tree]]): Tree = Assign(ref(valDef.symbol), params.head.head) - DefDef(meth, body) + DefDef(meth, body).withSpan(meth.span) } def prefixedName(prefix: String, valName: Name) = diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index 90c0207ebb6d..b8cbb4367db4 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -9,15 +9,18 @@ import Symbols._, Contexts._, Types._, Decorators._ import StdNames.nme import ast.TreeTypeMap +import scala.collection.mutable.ListBuffer + /** Rewrite an application * - * (((x1, ..., xn) => b): T)(y1, ..., yn) + * (([X1, ..., Xm] => (x1, ..., xn) => b): T)[T1, ..., Tm](y1, ..., yn) * * where * * - all yi are pure references without a prefix * - the closure can also be contextual or erased, but cannot be a SAM type - * _ the type ascription ...: T is optional + * - the type parameters Xi and type arguments Ti are optional + * - the type ascription ...: T is optional * * to * @@ -36,14 +39,10 @@ class BetaReduce extends MiniPhase: override def description: String = BetaReduce.description - override def transformApply(app: Apply)(using Context): Tree = app.fun match - case Select(fn, nme.apply) if defn.isFunctionType(fn.tpe) => - val app1 = BetaReduce(app, fn, app.args) - if app1 ne app then report.log(i"beta reduce $app -> $app1") - app1 - case _ => - app - + override def transformApply(app: Apply)(using Context): Tree = + val app1 = BetaReduce(app) + if app1 ne app then report.log(i"beta reduce $app -> $app1") + app1 object BetaReduce: import ast.tpd._ @@ -51,30 +50,77 @@ object BetaReduce: val name: String = "betaReduce" val description: 
String = "reduce closure applications" - /** Beta-reduces a call to `fn` with arguments `argSyms` or returns `tree` */ - def apply(original: Tree, fn: Tree, args: List[Tree])(using Context): Tree = - fn match - case Typed(expr, _) => - BetaReduce(original, expr, args) - case Block((anonFun: DefDef) :: Nil, closure: Closure) => - BetaReduce(anonFun, args) - case Block(stats, expr) => - val tree = BetaReduce(original, expr, args) - if tree eq original then original - else cpy.Block(fn)(stats, tree) - case Inlined(call, bindings, expr) => - val tree = BetaReduce(original, expr, args) - if tree eq original then original - else cpy.Inlined(fn)(call, bindings, tree) + /** Rewrite an application + * + * ((x1, ..., xn) => b)(e1, ..., en) + * + * to + * + * val/def x1 = e1; ...; val/def xn = en; b + * + * where `def` is used for call-by-name parameters. However, we shortcut any NoPrefix + * refs among the ei's directly without creating an intermediate binding. + * + * Similarly, rewrites type applications + * + * ([X1, ..., Xm] => (x1, ..., xn) => b).apply[T1, .., Tm](e1, ..., en) + * + * to + * + * type X1 = T1; ...; type Xm = Tm;val/def x1 = e1; ...; val/def xn = en; b + * + * This beta-reduction preserves the integrity of `Inlined` tree nodes. 
+ */ + def apply(tree: Tree)(using Context): Tree = + val bindingsBuf = new ListBuffer[DefTree] + def recur(fn: Tree, argss: List[List[Tree]]): Option[Tree] = fn match + case Block((ddef : DefDef) :: Nil, closure: Closure) if ddef.symbol == closure.meth.symbol => + Some(reduceApplication(ddef, argss, bindingsBuf)) + case Block((TypeDef(_, template: Template)) :: Nil, Typed(Apply(Select(New(_), _), _), _)) if template.constr.rhs.isEmpty => + template.body match + case (ddef: DefDef) :: Nil => Some(reduceApplication(ddef, argss, bindingsBuf)) + case _ => None + case Block(stats, expr) if stats.forall(isPureBinding) => + recur(expr, argss).map(cpy.Block(fn)(stats, _)) + case Inlined(call, bindings, expr) if bindings.forall(isPureBinding) => + recur(expr, argss).map(cpy.Inlined(fn)(call, bindings, _)) + case Typed(expr, tpt) => + recur(expr, argss) + case TypeApply(Select(expr, nme.asInstanceOfPM), List(tpt)) => + recur(expr, argss) + case _ => None + tree match + case Apply(Select(fn, nme.apply), args) if defn.isFunctionType(fn.tpe) => + recur(fn, List(args)) match + case Some(reduced) => + seq(bindingsBuf.result(), reduced).withSpan(tree.span) + case None => + tree + case Apply(TypeApply(Select(fn, nme.apply), targs), args) if fn.tpe.typeSymbol eq dotc.core.Symbols.defn.PolyFunctionClass => + recur(fn, List(targs, args)) match + case Some(reduced) => + seq(bindingsBuf.result(), reduced).withSpan(tree.span) + case None => + tree case _ => - original - end apply + tree + + /** Beta-reduces a call to `ddef` with arguments `args` and registers new bindings */ + def reduceApplication(ddef: DefDef, argss: List[List[Tree]], bindings: ListBuffer[DefTree])(using Context): Tree = + val (targs, args) = argss.flatten.partition(_.isType) + val tparams = ddef.leadingTypeParams + val vparams = ddef.termParamss.flatten + + val targSyms = + for (targ, tparam) <- targs.zip(tparams) yield + targ.tpe.dealias match + case ref @ TypeRef(NoPrefix, _) => + ref.symbol + case _ => + val 
binding = TypeDef(newSymbol(ctx.owner, tparam.name, EmptyFlags, TypeAlias(targ.tpe), coord = targ.span)).withSpan(targ.span) + bindings += binding + binding.symbol - /** Beta-reduces a call to `ddef` with arguments `argSyms` */ - def apply(ddef: DefDef, args: List[Tree])(using Context) = - val bindings = List.newBuilder[ValDef] - val vparams = ddef.termParamss.iterator.flatten.toList - assert(args.hasSameLengthAs(vparams)) val argSyms = for (arg, param) <- args.zip(vparams) yield arg.tpe.dealias match @@ -82,16 +128,20 @@ object BetaReduce: ref.symbol case _ => val flags = Synthetic | (param.symbol.flags & Erased) - val tpe = if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias else arg.tpe.widen + val tpe = + if arg.tpe.isBottomType then param.tpe.widenTermRefExpr + else if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias + else arg.tpe.widen val binding = ValDef(newSymbol(ctx.owner, param.name, flags, tpe, coord = arg.span), arg).withSpan(arg.span) - bindings += binding + if !(tpe.isInstanceOf[ConstantType] && isPureExpr(arg)) then + bindings += binding binding.symbol val expansion = TreeTypeMap( oldOwners = ddef.symbol :: Nil, newOwners = ctx.owner :: Nil, - substFrom = vparams.map(_.symbol), - substTo = argSyms + substFrom = (tparams ::: vparams).map(_.symbol), + substTo = targSyms ::: argSyms ).transform(ddef.rhs) val expansion1 = new TreeMap { @@ -99,8 +149,5 @@ object BetaReduce: case ConstantType(const) if isPureExpr(tree) => cpy.Literal(tree)(const) case _ => super.transform(tree) }.transform(expansion) - val bindings1 = - bindings.result().filterNot(vdef => vdef.tpt.tpe.isInstanceOf[ConstantType] && isPureExpr(vdef.rhs)) - seq(bindings1, expansion1) - end apply + expansion1 diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index e302170991f9..569b16681cde 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -129,9 +129,12 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { assert(ctx.typer.isInstanceOf[Erasure.Typer]) ctx.typer.typed(untpd.cpy.Apply(ref)(ref, args), member.info.finalResultType) else - val defn.ContextFunctionType(argTypes, resType, isErased) = tp: @unchecked + val defn.ContextFunctionType(argTypes, resType, erasedParams) = tp: @unchecked val anonFun = newAnonFun(ctx.owner, - MethodType(if isErased then Nil else argTypes, resType), + MethodType( + argTypes.zip(erasedParams.padTo(argTypes.length, false)) + .flatMap((t, e) => if e then None else Some(t)), + resType), coord = ctx.owner.coord) anonFun.info = transformInfo(anonFun, anonFun.info) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index 6b0a4c3e9737..b63773687f74 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -67,8 +67,8 @@ class CheckReentrant extends MiniPhase { if (sym.isTerm && !sym.isSetter && !isIgnored(sym)) if (sym.is(Mutable)) { report.error( - i"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info} - | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""") + em"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info} + | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""") shared += sym } else if (!sym.is(Method) || sym.isOneOf(Accessor | ParamAccessor)) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala new file mode 100644 index 000000000000..5a178ff2ec1f --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -0,0 +1,709 @@ +package dotty.tools.dotc.transform + +import dotty.tools.dotc.ast.tpd 
+import dotty.tools.dotc.ast.tpd.{Inlined, TreeTraverser} +import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.ast.untpd.ImportSelector +import dotty.tools.dotc.config.ScalaSettings +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.{em, i} +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.report +import dotty.tools.dotc.reporting.Message +import dotty.tools.dotc.typer.ImportInfo +import dotty.tools.dotc.util.{Property, SrcPos} +import dotty.tools.dotc.core.Mode +import dotty.tools.dotc.core.Types.{AnnotatedType, ConstantType, NoType, TermRef, Type, TypeTraverser} +import dotty.tools.dotc.core.Flags.flagsString +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.transform.MegaPhase.MiniPhase +import dotty.tools.dotc.core.Annotations +import dotty.tools.dotc.core.Definitions +import dotty.tools.dotc.core.NameKinds.WildcardParamName +import dotty.tools.dotc.core.Symbols.Symbol +import dotty.tools.dotc.core.StdNames.nme + + +/** + * A compiler phase that checks for unused imports or definitions + * + * Basically, it gathers definition/imports and their usage. If a + * definition/imports does not have any usage, then it is reported. 
+ */ +class CheckUnused extends MiniPhase: + import CheckUnused.UnusedData + + /** + * The key used to retrieve the "unused entity" analysis metadata, + * from the compilation `Context` + */ + private val _key = Property.Key[UnusedData] + + private def unusedDataApply[U](f: UnusedData => U)(using Context): Context = + ctx.property(_key).foreach(f) + ctx + private def getUnusedData(using Context): Option[UnusedData] = + ctx.property(_key) + + override def phaseName: String = CheckUnused.phaseName + + override def description: String = CheckUnused.description + + override def isRunnable(using Context): Boolean = + ctx.settings.Wunused.value.nonEmpty && + !ctx.isJava + + // ========== SETUP ============ + + override def prepareForUnit(tree: tpd.Tree)(using Context): Context = + val data = UnusedData() + val fresh = ctx.fresh.setProperty(_key, data) + fresh + + // ========== END + REPORTING ========== + + override def transformUnit(tree: tpd.Tree)(using Context): tpd.Tree = + unusedDataApply(ud => reportUnused(ud.getUnused)) + tree + + // ========== MiniPhase Prepare ========== + override def prepareForOther(tree: tpd.Tree)(using Context): Context = + // A standard tree traverser covers cases not handled by the Mega/MiniPhase + traverser.traverse(tree) + ctx + + override def prepareForInlined(tree: tpd.Inlined)(using Context): Context = + traverser.traverse(tree.call) + ctx + + override def prepareForIdent(tree: tpd.Ident)(using Context): Context = + if tree.symbol.exists then + val prefixes = LazyList.iterate(tree.typeOpt.normalizedPrefix)(_.normalizedPrefix).takeWhile(_ != NoType) + .take(10) // Failsafe for the odd case if there was an infinite cycle + for prefix <- prefixes do + unusedDataApply(_.registerUsed(prefix.classSymbol, None)) + unusedDataApply(_.registerUsed(tree.symbol, Some(tree.name))) + else if tree.hasType then + unusedDataApply(_.registerUsed(tree.tpe.classSymbol, Some(tree.name))) + else + ctx + + override def prepareForSelect(tree: 
tpd.Select)(using Context): Context = + unusedDataApply(_.registerUsed(tree.symbol, Some(tree.name))) + + override def prepareForBlock(tree: tpd.Block)(using Context): Context = + pushInBlockTemplatePackageDef(tree) + + override def prepareForTemplate(tree: tpd.Template)(using Context): Context = + pushInBlockTemplatePackageDef(tree) + + override def prepareForPackageDef(tree: tpd.PackageDef)(using Context): Context = + pushInBlockTemplatePackageDef(tree) + + override def prepareForValDef(tree: tpd.ValDef)(using Context): Context = + unusedDataApply{ud => + // do not register the ValDef generated for `object` + traverseAnnotations(tree.symbol) + if !tree.symbol.is(Module) then + ud.registerDef(tree) + if tree.name.mangledString.startsWith(nme.derived.mangledString + "$") + && tree.typeOpt != NoType then + ud.registerUsed(tree.typeOpt.typeSymbol, None, true) + ud.addIgnoredUsage(tree.symbol) + } + + override def prepareForDefDef(tree: tpd.DefDef)(using Context): Context = + unusedDataApply{ ud => + import ud.registerTrivial + tree.registerTrivial + traverseAnnotations(tree.symbol) + ud.registerDef(tree) + ud.addIgnoredUsage(tree.symbol) + } + + override def prepareForTypeDef(tree: tpd.TypeDef)(using Context): Context = + unusedDataApply{ ud => + if !tree.symbol.is(Param) then // Ignore type parameter (as Scala 2) + traverseAnnotations(tree.symbol) + ud.registerDef(tree) + ud.addIgnoredUsage(tree.symbol) + } + + override def prepareForBind(tree: tpd.Bind)(using Context): Context = + traverseAnnotations(tree.symbol) + unusedDataApply(_.registerPatVar(tree)) + + override def prepareForTypeTree(tree: tpd.TypeTree)(using Context): Context = + if !tree.isInstanceOf[tpd.InferredTypeTree] then typeTraverser(unusedDataApply).traverse(tree.tpe) + ctx + + // ========== MiniPhase Transform ========== + + override def transformBlock(tree: tpd.Block)(using Context): tpd.Tree = + popOutBlockTemplatePackageDef() + tree + + override def transformTemplate(tree: tpd.Template)(using 
Context): tpd.Tree = + popOutBlockTemplatePackageDef() + tree + + override def transformPackageDef(tree: tpd.PackageDef)(using Context): tpd.Tree = + popOutBlockTemplatePackageDef() + tree + + override def transformValDef(tree: tpd.ValDef)(using Context): tpd.Tree = + unusedDataApply(_.removeIgnoredUsage(tree.symbol)) + tree + + override def transformDefDef(tree: tpd.DefDef)(using Context): tpd.Tree = + unusedDataApply(_.removeIgnoredUsage(tree.symbol)) + tree + + override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = + unusedDataApply(_.removeIgnoredUsage(tree.symbol)) + tree + + // ---------- MiniPhase HELPERS ----------- + + private def pushInBlockTemplatePackageDef(tree: tpd.Block | tpd.Template | tpd.PackageDef)(using Context): Context = + unusedDataApply { ud => + ud.pushScope(UnusedData.ScopeType.fromTree(tree)) + } + ctx + + private def popOutBlockTemplatePackageDef()(using Context): Context = + unusedDataApply { ud => + ud.popScope() + } + ctx + + private def newCtx(tree: tpd.Tree)(using Context) = + if tree.symbol.exists then ctx.withOwner(tree.symbol) else ctx + + /** + * This traverse is the **main** component of this phase + * + * It traverse the tree the tree and gather the data in the + * corresponding context property + */ + private def traverser = new TreeTraverser: + import tpd._ + import UnusedData.ScopeType + + /* Register every imports, definition and usage */ + override def traverse(tree: tpd.Tree)(using Context): Unit = + val newCtx = if tree.symbol.exists then ctx.withOwner(tree.symbol) else ctx + tree match + case imp:tpd.Import => + unusedDataApply(_.registerImport(imp)) + traverseChildren(tree)(using newCtx) + case ident: Ident => + prepareForIdent(ident) + traverseChildren(tree)(using newCtx) + case sel: Select => + prepareForSelect(sel) + traverseChildren(tree)(using newCtx) + case _: (tpd.Block | tpd.Template | tpd.PackageDef) => + //! 
DIFFERS FROM MINIPHASE + unusedDataApply { ud => + ud.inNewScope(ScopeType.fromTree(tree))(traverseChildren(tree)(using newCtx)) + } + case t:tpd.ValDef => + prepareForValDef(t) + traverseChildren(tree)(using newCtx) + transformValDef(t) + case t:tpd.DefDef => + prepareForDefDef(t) + traverseChildren(tree)(using newCtx) + transformDefDef(t) + case t:tpd.TypeDef => + prepareForTypeDef(t) + traverseChildren(tree)(using newCtx) + transformTypeDef(t) + case t: tpd.Bind => + prepareForBind(t) + traverseChildren(tree)(using newCtx) + case _: tpd.InferredTypeTree => + case t@tpd.TypeTree() => + //! DIFFERS FROM MINIPHASE + typeTraverser(unusedDataApply).traverse(t.tpe) + traverseChildren(tree)(using newCtx) + case _ => + //! DIFFERS FROM MINIPHASE + traverseChildren(tree)(using newCtx) + end traverse + end traverser + + /** This is a type traverser which catch some special Types not traversed by the term traverser above */ + private def typeTraverser(dt: (UnusedData => Any) => Unit)(using Context) = new TypeTraverser: + override def traverse(tp: Type): Unit = + if tp.typeSymbol.exists then dt(_.registerUsed(tp.typeSymbol, Some(tp.typeSymbol.name))) + tp match + case AnnotatedType(_, annot) => + dt(_.registerUsed(annot.symbol, None)) + traverseChildren(tp) + case _ => + traverseChildren(tp) + + /** This traverse the annotations of the symbol */ + private def traverseAnnotations(sym: Symbol)(using Context): Unit = + sym.denot.annotations.foreach(annot => traverser.traverse(annot.tree)) + + /** Do the actual reporting given the result of the anaylsis */ + private def reportUnused(res: UnusedData.UnusedResult)(using Context): Unit = + import CheckUnused.WarnTypes + res.warnings.foreach { s => + s match + case (t, WarnTypes.Imports) => + report.warning(s"unused import", t) + case (t, WarnTypes.LocalDefs) => + report.warning(s"unused local definition", t) + case (t, WarnTypes.ExplicitParams) => + report.warning(s"unused explicit parameter", t) + case (t, 
WarnTypes.ImplicitParams) => + report.warning(s"unused implicit parameter", t) + case (t, WarnTypes.PrivateMembers) => + report.warning(s"unused private member", t) + case (t, WarnTypes.PatVars) => + report.warning(s"unused pattern variable", t) + } + +end CheckUnused + +object CheckUnused: + val phaseName: String = "checkUnused" + val description: String = "check for unused elements" + + private enum WarnTypes: + case Imports + case LocalDefs + case ExplicitParams + case ImplicitParams + case PrivateMembers + case PatVars + + /** + * A stateful class gathering the infos on : + * - imports + * - definitions + * - usage + */ + private class UnusedData: + import dotty.tools.dotc.transform.CheckUnused.UnusedData.UnusedResult + import collection.mutable.{Set => MutSet, Map => MutMap, Stack => MutStack} + import UnusedData.ScopeType + + /** The current scope during the tree traversal */ + var currScopeType: MutStack[ScopeType] = MutStack(ScopeType.Other) + + /* IMPORTS */ + private val impInScope = MutStack(MutSet[tpd.Import]()) + /** + * We store the symbol along with their accessibility without import. 
+ * Accessibility to their definition in outer context/scope + * + * See the `isAccessibleAsIdent` extension method below in the file + */ + private val usedInScope = MutStack(MutSet[(Symbol,Boolean, Option[Name], Boolean)]()) + private val usedInPosition = MutSet[(SrcPos, Name)]() + /* unused import collected during traversal */ + private val unusedImport = MutSet[ImportSelector]() + + /* LOCAL DEF OR VAL / Private Def or Val / Pattern variables */ + private val localDefInScope = MutSet[tpd.MemberDef]() + private val privateDefInScope = MutSet[tpd.MemberDef]() + private val explicitParamInScope = MutSet[tpd.MemberDef]() + private val implicitParamInScope = MutSet[tpd.MemberDef]() + private val patVarsInScope = MutSet[tpd.Bind]() + + /* Unused collection collected at the end */ + private val unusedLocalDef = MutSet[tpd.MemberDef]() + private val unusedPrivateDef = MutSet[tpd.MemberDef]() + private val unusedExplicitParams = MutSet[tpd.MemberDef]() + private val unusedImplicitParams = MutSet[tpd.MemberDef]() + private val unusedPatVars = MutSet[tpd.Bind]() + + /** All used symbols */ + private val usedDef = MutSet[Symbol]() + /** Do not register as used */ + private val doNotRegister = MutSet[Symbol]() + + /** Trivial definitions, avoid registering params */ + private val trivialDefs = MutSet[Symbol]() + + /** + * Push a new Scope of the given type, executes the given Unit and + * pop it back to the original type. 
+ */ + def inNewScope(newScope: ScopeType)(execInNewScope: => Unit)(using Context): Unit = + val prev = currScopeType + pushScope(newScope) + execInNewScope + popScope() + + /** + * Register a found (used) symbol along with its name + * + * The optional name will be used to target the right import + * as the same element can be imported with different renaming + */ + def registerUsed(sym: Symbol, name: Option[Name], isDerived: Boolean = false)(using Context): Unit = + if !isConstructorOfSynth(sym) && !doNotRegister(sym) then + if sym.isConstructor && sym.exists then + registerUsed(sym.owner, None) // constructor are "implicitly" imported with the class + else + usedInScope.top += ((sym, sym.isAccessibleAsIdent, name, isDerived)) + usedInScope.top += ((sym.companionModule, sym.isAccessibleAsIdent, name, isDerived)) + usedInScope.top += ((sym.companionClass, sym.isAccessibleAsIdent, name, isDerived)) + name.map(n => usedInPosition += ((sym.sourcePos, n))) + + /** Register a symbol that should be ignored */ + def addIgnoredUsage(sym: Symbol)(using Context): Unit = + doNotRegister ++= sym.everySymbol + + /** Remove a symbol that shouldn't be ignored anymore */ + def removeIgnoredUsage(sym: Symbol)(using Context): Unit = + doNotRegister --= sym.everySymbol + + + /** Register an import */ + def registerImport(imp: tpd.Import)(using Context): Unit = + if !tpd.languageImport(imp.expr).nonEmpty && !imp.isGeneratedByEnum && !isTransparentAndInline(imp) then + impInScope.top += imp + unusedImport ++= imp.selectors.filter { s => + !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) + } + + /** Register (or not) some `val` or `def` according to the context, scope and flags */ + def registerDef(memDef: tpd.MemberDef)(using Context): Unit = + if memDef.isValidMemberDef then + if memDef.isValidParam then + if memDef.symbol.isOneOf(GivenOrImplicit) then + implicitParamInScope += memDef + else + explicitParamInScope += memDef + else if currScopeType.top == ScopeType.Local 
then + localDefInScope += memDef + else if memDef.shouldReportPrivateDef then + privateDefInScope += memDef + + /** Register pattern variable */ + def registerPatVar(patvar: tpd.Bind)(using Context): Unit = + if !patvar.symbol.isUnusedAnnot then + patVarsInScope += patvar + + /** enter a new scope */ + def pushScope(newScopeType: ScopeType): Unit = + // unused imports : + currScopeType.push(newScopeType) + impInScope.push(MutSet()) + usedInScope.push(MutSet()) + + /** + * leave the current scope and do : + * + * - If there are imports in this scope check for unused ones + */ + def popScope()(using Context): Unit = + // used symbol in this scope + val used = usedInScope.pop().toSet + // used imports in this scope + val imports = impInScope.pop() + val kept = used.filterNot { t => + val (sym, isAccessible, optName, isDerived) = t + // keep the symbol for outer scope, if it matches **no** import + // This is the first matching wildcard selector + var selWildCard: Option[ImportSelector] = None + + val exists = imports.exists { imp => + sym.isInImport(imp, isAccessible, optName, isDerived) match + case None => false + case optSel@Some(sel) if sel.isWildcard => + if selWildCard.isEmpty then selWildCard = optSel + // We keep wildcard symbol for the end as they have the least precedence + false + case Some(sel) => + unusedImport -= sel + true + } + if !exists && selWildCard.isDefined then + unusedImport -= selWildCard.get + true // a matching import exists so the symbol won't be kept for outer scope + else + exists + } + + // if there's an outer scope + if usedInScope.nonEmpty then + // we keep the symbols not referencing an import in this scope + // as it can be the only reference to an outer import + usedInScope.top ++= kept + // register usage in this scope for other warnings at the end of the phase + usedDef ++= used.map(_._1) + // retrieve previous scope type + currScopeType.pop + end popScope + + /** + * Leave the scope and return a `List` of unused `ImportSelector`s 
+ * + * The given `List` is sorted by line and then column of the position + */ + def getUnused(using Context): UnusedResult = + popScope() + + val sortedImp = + if ctx.settings.WunusedHas.imports || ctx.settings.WunusedHas.strictNoImplicitWarn then + unusedImport.map(d => d.srcPos -> WarnTypes.Imports).toList + else + Nil + val sortedLocalDefs = + if ctx.settings.WunusedHas.locals then + localDefInScope + .filterNot(d => d.symbol.usedDefContains) + .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .map(d => d.namePos -> WarnTypes.LocalDefs).toList + else + Nil + val sortedExplicitParams = + if ctx.settings.WunusedHas.explicits then + explicitParamInScope + .filterNot(d => d.symbol.usedDefContains) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .map(d => d.namePos -> WarnTypes.ExplicitParams).toList + else + Nil + val sortedImplicitParams = + if ctx.settings.WunusedHas.implicits then + implicitParamInScope + .filterNot(d => d.symbol.usedDefContains) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .map(d => d.namePos -> WarnTypes.ImplicitParams).toList + else + Nil + val sortedPrivateDefs = + if ctx.settings.WunusedHas.privates then + privateDefInScope + .filterNot(d => d.symbol.usedDefContains) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .map(d => d.namePos -> WarnTypes.PrivateMembers).toList + else + Nil + val sortedPatVars = + if ctx.settings.WunusedHas.patvars then + patVarsInScope + .filterNot(d => d.symbol.usedDefContains) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .map(d => d.namePos -> WarnTypes.PatVars).toList + else + Nil + val warnings = List(sortedImp, sortedLocalDefs, sortedExplicitParams, sortedImplicitParams, sortedPrivateDefs, sortedPatVars).flatten.sortBy { s => + val pos = 
s._1.sourcePos + (pos.line, pos.column) + } + UnusedResult(warnings, Nil) + end getUnused + //============================ HELPERS ==================================== + + + /** + * Checks if import selects a def that is transparent and inline + */ + private def isTransparentAndInline(imp: tpd.Import)(using Context): Boolean = + imp.selectors.exists { sel => + val qual = imp.expr + val importedMembers = qual.tpe.member(sel.name).alternatives.map(_.symbol) + importedMembers.exists(s => s.is(Transparent) && s.is(Inline)) + } + + /** + * Heuristic to detect synthetic suffixes in names of symbols + */ + private def containsSyntheticSuffix(symbol: Symbol)(using Context): Boolean = + symbol.name.mangledString.contains("$") + + /** + * Is the the constructor of synthetic package object + * Should be ignored as it is always imported/used in package + * Trigger false negative on used import + * + * Without this check example: + * + * --- WITH PACKAGE : WRONG --- + * {{{ + * package a: + * val x: Int = 0 + * package b: + * import a._ // no warning + * }}} + * --- WITH OBJECT : OK --- + * {{{ + * object a: + * val x: Int = 0 + * object b: + * import a._ // unused warning + * }}} + */ + private def isConstructorOfSynth(sym: Symbol)(using Context): Boolean = + sym.exists && sym.isConstructor && sym.owner.isPackageObject && sym.owner.is(Synthetic) + + /** + * This is used to avoid reporting the parameters of the synthetic main method + * generated by `@main` + */ + private def isSyntheticMainParam(sym: Symbol)(using Context): Boolean = + sym.exists && ctx.platform.isMainMethod(sym.owner) && sym.owner.is(Synthetic) + + /** + * This is used to ignore exclusion imports (i.e. import `qual`.{`member` => _}) + */ + private def isImportExclusion(sel: ImportSelector): Boolean = sel.renamed match + case untpd.Ident(name) => name == StdNames.nme.WILDCARD + case _ => false + + /** + * If -Wunused:strict-no-implicit-warn import and this import selector could potentially import implicit. 
+ * return true + */ + private def shouldSelectorBeReported(imp: tpd.Import, sel: ImportSelector)(using Context): Boolean = + if ctx.settings.WunusedHas.strictNoImplicitWarn then + sel.isWildcard || + imp.expr.tpe.member(sel.name.toTermName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit)) || + imp.expr.tpe.member(sel.name.toTypeName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit)) + else + false + + extension (sym: Symbol) + /** is accessible without import in current context */ + private def isAccessibleAsIdent(using Context): Boolean = + sym.exists && + ctx.outersIterator.exists{ c => + c.owner == sym.owner + || sym.owner.isClass && c.owner.isClass + && c.owner.thisType.baseClasses.contains(sym.owner) + && c.owner.thisType.member(sym.name).alternatives.contains(sym) + } + + /** Given an import and accessibility, return an option of selector that match import<->symbol */ + private def isInImport(imp: tpd.Import, isAccessible: Boolean, symName: Option[Name], isDerived: Boolean)(using Context): Option[ImportSelector] = + val tpd.Import(qual, sels) = imp + val dealiasedSym = dealias(sym) + val simpleSelections = qual.tpe.member(sym.name).alternatives + val typeSelections = sels.flatMap(n => qual.tpe.member(n.name.toTypeName).alternatives) + val termSelections = sels.flatMap(n => qual.tpe.member(n.name.toTermName).alternatives) + val selectionsToDealias = typeSelections ::: termSelections + val qualHasSymbol = simpleSelections.map(_.symbol).contains(sym) || (simpleSelections ::: selectionsToDealias).map(_.symbol).map(dealias).contains(dealiasedSym) + def selector = sels.find(sel => (sel.name.toTermName == sym.name || sel.name.toTypeName == sym.name) && symName.map(n => n.toTermName == sel.rename).getOrElse(true)) + def dealiasedSelector = if(isDerived) sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collect { + case (sel, sym) if dealias(sym) == dealiasedSym => sel + }.headOption else None + def wildcard = sels.find(sel => 
sel.isWildcard && ((sym.is(Given) == sel.isGiven) || sym.is(Implicit))) + if qualHasSymbol && !isAccessible && sym.exists then + selector.orElse(dealiasedSelector).orElse(wildcard) // selector with name or wildcard (or given) + else + None + + + private def dealias(symbol: Symbol)(using Context): Symbol = + if(symbol.isType && symbol.asType.denot.isAliasType) then + symbol.asType.typeRef.dealias.typeSymbol + else symbol + /** Annotated with @unused */ + private def isUnusedAnnot(using Context): Boolean = + sym.annotations.exists(a => a.symbol == ctx.definitions.UnusedAnnot) + + private def shouldNotReportParamOwner(using Context): Boolean = + if sym.exists then + val owner = sym.owner + trivialDefs(owner) || // is a trivial def + owner.isPrimaryConstructor || + owner.annotations.exists ( // @depreacated + _.symbol == ctx.definitions.DeprecatedAnnot + ) || + owner.isAllOf(Synthetic | PrivateLocal) || + owner.is(Accessor) || + owner.isOverriden + else + false + + private def usedDefContains(using Context): Boolean = + sym.everySymbol.exists(usedDef.apply) + + private def everySymbol(using Context): List[Symbol] = + List(sym, sym.companionClass, sym.companionModule, sym.moduleClass).filter(_.exists) + + /** A function is overriden. 
Either has `override flags` or parent has a matching member (type and name) */ + private def isOverriden(using Context): Boolean = + sym.is(Flags.Override) || + (if sym.exists then sym.owner.thisType.parents.exists(p => sym.matchingMember(p).exists) else false) + + end extension + + extension (defdef: tpd.DefDef) + // so trivial that it never consumes params + private def isTrivial(using Context): Boolean = + val rhs = defdef.rhs + rhs.symbol == ctx.definitions.Predef_undefined || + rhs.tpe =:= ctx.definitions.NothingType || + defdef.symbol.is(Deferred) || + (rhs match { + case _: tpd.Literal => true + case _ => rhs.tpe match + case ConstantType(_) => true + case tp: TermRef => + // Detect Scala 2 SingleType + tp.underlying.classSymbol.is(Flags.Module) + case _ => + false + }) + def registerTrivial(using Context): Unit = + if defdef.isTrivial then + trivialDefs += defdef.symbol + + extension (memDef: tpd.MemberDef) + private def isValidMemberDef(using Context): Boolean = + !memDef.symbol.isUnusedAnnot && !memDef.symbol.isAllOf(Flags.AccessorCreationFlags) && !memDef.name.isWildcard && !memDef.symbol.owner.is(Extension) + + private def isValidParam(using Context): Boolean = + val sym = memDef.symbol + (sym.is(Param) || sym.isAllOf(PrivateParamAccessor | Local, butNot = CaseAccessor)) && + !isSyntheticMainParam(sym) && + !sym.shouldNotReportParamOwner + + + private def shouldReportPrivateDef(using Context): Boolean = + currScopeType.top == ScopeType.Template && !memDef.symbol.isConstructor && memDef.symbol.is(Private, butNot = SelfName | Synthetic | CaseAccessor) + + extension (imp: tpd.Import) + /** Enum generate an import for its cases (but outside them), which should be ignored */ + def isGeneratedByEnum(using Context): Boolean = + imp.symbol.exists && imp.symbol.owner.is(Flags.Enum, butNot = Flags.Case) + + extension (thisName: Name) + private def isWildcard: Boolean = + thisName == StdNames.nme.WILDCARD || thisName.is(WildcardParamName) + + end UnusedData + + 
private object UnusedData: + enum ScopeType: + case Local + case Template + case Other + + object ScopeType: + /** return the scope corresponding to the enclosing scope of the given tree */ + def fromTree(tree: tpd.Tree): ScopeType = tree match + case _:tpd.Template => Template + case _:tpd.Block => Local + case _ => Other + + /** A container for the results of the used elements analysis */ + case class UnusedResult(warnings: List[(dotty.tools.dotc.util.SrcPos, WarnTypes)], usedImports: List[(tpd.Import, untpd.ImportSelector)]) +end CheckUnused + diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala index be454281bcbb..b7e8ccf4e7e1 100644 --- a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala +++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala @@ -80,7 +80,7 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => parents.map { case app @ Apply(fn, args0) if fn.symbol.owner == targetCls => if args0.nonEmpty && targetCls == defn.JavaEnumClass then - report.error("the constructor of java.lang.Enum cannot be called explicitly", app.sourcePos) + report.error(em"the constructor of java.lang.Enum cannot be called explicitly", app.sourcePos) cpy.Apply(app)(fn, args0 ++ args) case p => p } @@ -110,7 +110,7 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => yield { def forwarderSym(flags: FlagSet, info: Type): Symbol { type ThisName = TermName } = val sym = newSymbol(clazz, enumValue.name.asTermName, flags, info) - sym.addAnnotation(Annotations.Annotation(defn.ScalaStaticAnnot)) + sym.addAnnotation(Annotations.Annotation(defn.ScalaStaticAnnot, sym.span)) sym val body = moduleRef.select(enumValue) if ctx.settings.scalajs.value then diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala index 
be58fb41f1da..5863c360e728 100644 --- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala +++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala @@ -20,7 +20,7 @@ object ContextFunctionResults: */ def annotateContextResults(mdef: DefDef)(using Context): Unit = def contextResultCount(rhs: Tree, tp: Type): Int = tp match - case defn.ContextFunctionType(_, resTpe, _) => + case defn.ContextFunctionType(_, resTpe, erasedParams) if !erasedParams.contains(true) /* Only enable for non-erased functions */ => rhs match case closureDef(meth) => 1 + contextResultCount(meth.rhs, resTpe) case _ => 0 @@ -39,7 +39,7 @@ object ContextFunctionResults: val count = contextResultCount(mdef.rhs, mdef.tpt.tpe) if Config.flattenContextFunctionResults && count != 0 && !disabled then - val countAnnot = Annotation(defn.ContextResultCountAnnot, Literal(Constant(count))) + val countAnnot = Annotation(defn.ContextResultCountAnnot, Literal(Constant(count)), mdef.symbol.span) mdef.symbol.addAnnotation(countAnnot) end annotateContextResults @@ -58,7 +58,7 @@ object ContextFunctionResults: */ def contextResultsAreErased(sym: Symbol)(using Context): Boolean = def allErased(tp: Type): Boolean = tp.dealias match - case defn.ContextFunctionType(_, resTpe, isErased) => isErased && allErased(resTpe) + case defn.ContextFunctionType(_, resTpe, erasedParams) => !erasedParams.contains(false) && allErased(resTpe) case _ => true contextResultCount(sym) > 0 && allErased(sym.info.finalResultType) @@ -72,10 +72,8 @@ object ContextFunctionResults: integrateContextResults(rt, crCount) case tp: MethodOrPoly => tp.derivedLambdaType(resType = integrateContextResults(tp.resType, crCount)) - case defn.ContextFunctionType(argTypes, resType, isErased) => - val methodType: MethodTypeCompanion = - if isErased then ErasedMethodType else MethodType - methodType(argTypes, integrateContextResults(resType, crCount - 1)) + case defn.ContextFunctionType(argTypes, resType, erasedParams) => + 
MethodType(argTypes, integrateContextResults(resType, crCount - 1)) /** The total number of parameters of method `sym`, not counting * erased parameters, but including context result parameters. @@ -85,14 +83,16 @@ object ContextFunctionResults: def contextParamCount(tp: Type, crCount: Int): Int = if crCount == 0 then 0 else - val defn.ContextFunctionType(params, resTpe, isErased) = tp: @unchecked + val defn.ContextFunctionType(params, resTpe, erasedParams) = tp: @unchecked val rest = contextParamCount(resTpe, crCount - 1) - if isErased then rest else params.length + rest + if erasedParams.contains(true) then erasedParams.count(_ == false) + rest else params.length + rest def normalParamCount(tp: Type): Int = tp.widenExpr.stripPoly match case mt @ MethodType(pnames) => val rest = normalParamCount(mt.resType) - if mt.isErasedMethod then rest else pnames.length + rest + if mt.hasErasedParams then + mt.erasedParams.count(_ == false) + rest + else pnames.length + rest case _ => contextParamCount(tp, contextResultCount(sym)) normalParamCount(sym.info) @@ -133,4 +133,4 @@ object ContextFunctionResults: case _ => false -end ContextFunctionResults \ No newline at end of file +end ContextFunctionResults diff --git a/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala new file mode 100644 index 000000000000..3081bd5c2b20 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala @@ -0,0 +1,251 @@ +package dotty.tools +package dotc +package transform + +import ast.{Trees, tpd} +import core.* +import Decorators.* +import NameKinds.BoundaryName +import MegaPhase._ +import Types._, Contexts._, Flags._, DenotTransformers._ +import Symbols._, StdNames._, Trees._ +import util.Property +import Constants.Constant +import Flags.MethodOrLazy + +object DropBreaks: + val name: String = "dropBreaks" + val description: String = "replace local Break throws by labeled returns" + + /** Usage data and other info 
associated with a Label symbol. + * @param goto the return-label to use for a labeled return. + * @param enclMeth the enclosing method + */ + class LabelUsage(val goto: TermSymbol, val enclMeth: Symbol): + /** The number of references to associated label that come from labeled returns */ + var returnRefs: Int = 0 + /** The number of other references to associated label */ + var otherRefs: Int = 0 + + private val LabelUsages = new Property.Key[Map[Symbol, LabelUsage]] + private val ShadowedLabels = new Property.Key[Set[Symbol]] + +/** Rewrites local Break throws to labeled returns. + * Drops `try` statements on breaks if no other uses of its label remain. + * A Break throw with a `Label` created by some enclosing boundary is replaced + * with a labeled return if + * + * - the throw and the boundary are in the same method, and + * - there is no try expression inside the boundary that encloses the throw. + */ +class DropBreaks extends MiniPhase: + import DropBreaks.* + + import tpd._ + + override def phaseName: String = DropBreaks.name + + override def description: String = DropBreaks.description + + override def runsAfterGroupsOf: Set[String] = Set(ElimByName.name) + // we want by-name parameters to be converted to closures + + /** The number of boundary nodes enclosing the currently analized tree. 
*/ + private var enclosingBoundaries: Int = 0 + + private object LabelTry: + + object GuardedThrow: + + /** `(ex, local)` provided `expr` matches + * + * if ex.label.eq(local) then ex.value else throw ex + */ + def unapply(expr: Tree)(using Context): Option[(Symbol, Symbol)] = stripTyped(expr) match + case If( + Apply(Select(Select(ex: Ident, label), eq), (lbl @ Ident(local)) :: Nil), + Select(ex2: Ident, value), + Apply(throww, (ex3: Ident) :: Nil)) + if label == nme.label && eq == nme.eq && local == nme.local && value == nme.value + && throww.symbol == defn.throwMethod + && ex.symbol == ex2.symbol && ex.symbol == ex3.symbol => + Some((ex.symbol, lbl.symbol)) + case _ => + None + end GuardedThrow + + /** `(local, body)` provided `tree` matches + * + * try body + * catch case ex: Break => + * if ex.label.eq(local) then ex.value else throw ex + */ + def unapply(tree: Tree)(using Context): Option[(Symbol, Tree)] = stripTyped(tree) match + case Try(body, CaseDef(pat @ Bind(_, Typed(_, tpt)), EmptyTree, GuardedThrow(exc, local)) :: Nil, EmptyTree) + if tpt.tpe.isRef(defn.BreakClass) && exc == pat.symbol => + Some((local, body)) + case _ => + None + end LabelTry + + private object BreakBoundary: + + /** `(local, body)` provided `tree` matches + * + * { val local: Label[...] 
= ...; } + */ + def unapply(tree: Tree)(using Context): Option[(Symbol, Tree)] = stripTyped(tree) match + case Block((vd @ ValDef(nme.local, _, _)) :: Nil, LabelTry(caughtAndRhs)) + if vd.symbol.info.isRef(defn.LabelClass) && vd.symbol == caughtAndRhs._1 => + Some(caughtAndRhs) + case _ => + None + end BreakBoundary + + private object Break: + + private def isBreak(sym: Symbol)(using Context): Boolean = + sym.name == nme.break && sym.owner == defn.boundaryModule.moduleClass + + /** `(local, arg)` provided `tree` matches + * + * break[...](arg)(local) + * + * or `(local, ())` provided `tree` matches + * + * break()(local) + */ + def unapply(tree: Tree)(using Context): Option[(Symbol, Tree)] = tree match + case Apply(Apply(fn, args), id :: Nil) + if isBreak(fn.symbol) => + stripInlined(id) match + case id: Ident => + val arg = (args: @unchecked) match + case arg :: Nil => arg + case Nil => Literal(Constant(())).withSpan(tree.span) + Some((id.symbol, arg)) + case _ => None + case _ => None + end Break + + /** The LabelUsage data associated with `lbl` in the current context */ + private def labelUsage(lbl: Symbol)(using Context): Option[LabelUsage] = + for + usesMap <- ctx.property(LabelUsages) + uses <- usesMap.get(lbl) + yield + uses + + /** If `tree` is a BreakBoundary, associate a fresh `LabelUsage` with its label. */ + override def prepareForBlock(tree: Block)(using Context): Context = tree match + case BreakBoundary(label, _) => + enclosingBoundaries += 1 + val mapSoFar = ctx.property(LabelUsages).getOrElse(Map.empty) + val goto = newSymbol(ctx.owner, BoundaryName.fresh(), Synthetic | Label, tree.tpe) + ctx.fresh.setProperty(LabelUsages, + mapSoFar.updated(label, LabelUsage(goto, ctx.owner.enclosingMethod))) + case _ => + ctx + + /** Include all enclosing labels in the `ShadowedLabels` context property. + * This means that breaks to these labels will not be translated to labeled + * returns while this context is valid. 
+ */ + private def shadowLabels(using Context): Context = + ctx.property(LabelUsages) match + case Some(usesMap) => + val setSoFar = ctx.property(ShadowedLabels).getOrElse(Set.empty) + ctx.fresh.setProperty(ShadowedLabels, setSoFar ++ usesMap.keysIterator) + case _ => ctx + + /** Need to suppress labeled returns if there is an intervening try + */ + override def prepareForTry(tree: Try)(using Context): Context = + if enclosingBoundaries == 0 then ctx + else tree match + case LabelTry(_, _) => ctx + case _ => shadowLabels + + override def prepareForValDef(tree: ValDef)(using Context): Context = + if enclosingBoundaries != 0 + && tree.symbol.is(Lazy) + && tree.symbol.owner == ctx.owner.enclosingMethod + then shadowLabels // RHS be converted to a lambda + else ctx + + /** If `tree` is a BreakBoundary, transform it as follows: + * - Wrap it in a labeled block if its label has local uses + * - Drop the try/catch if its label has no other uses + */ + override def transformBlock(tree: Block)(using Context): Tree = tree match + case BreakBoundary(label, expr) => + enclosingBoundaries -= 1 + val uses = ctx.property(LabelUsages).get(label) + val tree1 = + if uses.otherRefs > 1 then + // one non-local ref is always in the catch clause; this one does not count + tree + else + expr + report.log(i"trans boundary block $label // ${uses.returnRefs}, ${uses.otherRefs}") + if uses.returnRefs > 0 then Labeled(uses.goto, tree1) else tree1 + case _ => + tree + + private def isBreak(sym: Symbol)(using Context): Boolean = + sym.name == nme.break && sym.owner == defn.boundaryModule.moduleClass + + private def transformBreak(tree: Tree, arg: Tree, lbl: Symbol)(using Context): Tree = + report.log(i"transform break $tree/$arg/$lbl") + labelUsage(lbl) match + case Some(uses: LabelUsage) + if uses.enclMeth == ctx.owner.enclosingMethod + && !ctx.property(ShadowedLabels).getOrElse(Set.empty).contains(lbl) + => + uses.otherRefs -= 1 + uses.returnRefs += 1 + Return(arg, 
ref(uses.goto)).withSpan(arg.span) + case _ => + tree + + + /** Rewrite a break call + * + * break.apply[...](value)(using lbl) + * + * where `lbl` is a label defined in the current method and is not included in + * ShadowedLabels to + * + * return[target] arg + * + * where `target` is the `goto` return label associated with `lbl`. + * Adjust associated ref counts accordingly. The local refcount is increased + * and the non-local refcount is decreased, since the `lbl` implicit argument + * to `break` is dropped. + */ + override def transformApply(tree: Apply)(using Context): Tree = + if enclosingBoundaries == 0 then tree + else tree match + case Break(lbl, arg) => + labelUsage(lbl) match + case Some(uses: LabelUsage) + if uses.enclMeth == ctx.owner.enclosingMethod + && !ctx.property(ShadowedLabels).getOrElse(Set.empty).contains(lbl) + => + uses.otherRefs -= 1 + uses.returnRefs += 1 + Return(arg, ref(uses.goto)).withSpan(arg.span) + case _ => tree + case _ => tree + + /** If `tree` refers to an enclosing label, increase its non local recount. + * This increase is corrected in `transformInlined` if the reference turns + * out to be part of a BreakThrow to a local, non-shadowed label. 
+ */ + override def transformIdent(tree: Ident)(using Context): Tree = + if enclosingBoundaries != 0 then + for uses <- labelUsage(tree.symbol) do + uses.otherRefs += 1 + tree + +end DropBreaks diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala index 479a455b4aea..151e841f0e48 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala @@ -15,6 +15,7 @@ import MegaPhase.* import Decorators.* import typer.RefChecks import reporting.trace +import dotty.tools.dotc.core.Names.Name /** This phase implements the following transformations: * @@ -79,11 +80,14 @@ class ElimByName extends MiniPhase, InfoTransformer: case ExprType(rt) if exprBecomesFunction(sym) => defn.ByNameFunction(rt) case tp: MethodType => - def exprToFun(tp: Type) = tp match - case ExprType(rt) => defn.ByNameFunction(rt) + def exprToFun(tp: Type, name: Name) = tp match + case ExprType(rt) => + if rt.hasAnnotation(defn.ErasedParamAnnot) then + report.error(em"By-name parameter cannot be erased: $name", sym.srcPos) + defn.ByNameFunction(rt) case tp => tp tp.derivedLambdaType( - paramInfos = tp.paramInfos.mapConserve(exprToFun), + paramInfos = tp.paramInfos.zipWithConserve(tp.paramNames)(exprToFun), resType = transformInfo(tp.resType, sym)) case tp: PolyType => tp.derivedLambdaType(resType = transformInfo(tp.resType, sym)) diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala index bdc2a268c1f8..359b882ef26b 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -51,10 +51,10 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => // see https://github.com/scala/bug/issues/11714 val validJava = isValidJavaVarArgs(sym.info) if !validJava then - report.error("""To 
generate java-compatible varargs: + report.error(em"""To generate java-compatible varargs: | - there must be a single repeated parameter | - it must be the last argument in the last parameter list - |""".stripMargin, + |""", sym.sourcePos) else addVarArgsForwarder(sym, isJavaVarargsOverride, hasAnnotation, parentHasAnnotation) @@ -87,7 +87,8 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => * signatures of a Java varargs method and a Scala varargs override are not the same. */ private def overridesJava(sym: Symbol)(using Context) = - sym.owner.info.baseClasses.drop(1).exists { bc => + sym.memberCanMatchInheritedSymbols + && sym.owner.info.baseClasses.drop(1).exists { bc => bc.is(JavaDefined) && { val other = bc.info.nonPrivateDecl(sym.name) other.hasAltWith { alt => diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 84005424e3ec..981dd5f60aea 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -500,7 +500,7 @@ object Erasure { if isFunction && !ctx.settings.scalajs.value then val arity = implParamTypes.length val specializedFunctionalInterface = - if defn.isSpecializableFunctionSAM(implParamTypes, implResultType) then + if !implType.hasErasedParams && defn.isSpecializableFunctionSAM(implParamTypes, implResultType) then // Using these subclasses is critical to avoid boxing since their // SAM is a specialized method `apply$mc*$sp` whose default // implementation in FunctionN boxes. @@ -549,28 +549,30 @@ object Erasure { /** Check that Java statics and packages can only be used in selections. 
*/ - private def checkNotErased(tree: Tree)(using Context): tree.type = { - if (!ctx.mode.is(Mode.Type)) { + private def checkNotErased(tree: Tree)(using Context): tree.type = + if !ctx.mode.is(Mode.Type) then if isErased(tree) then val msg = if tree.symbol.is(Flags.Inline) then em"""${tree.symbol} is declared as `inline`, but was not inlined | - |Try increasing `-Xmax-inlines` above ${ctx.settings.XmaxInlines.value}""".stripMargin - else em"${tree.symbol} is declared as `erased`, but is in fact used" + |Try increasing `-Xmax-inlines` above ${ctx.settings.XmaxInlines.value}""" + else + em"${tree.symbol} is declared as `erased`, but is in fact used" report.error(msg, tree.srcPos) - tree.symbol.getAnnotation(defn.CompileTimeOnlyAnnot) match { + tree.symbol.getAnnotation(defn.CompileTimeOnlyAnnot) match case Some(annot) => - def defaultMsg = - i"""Reference to ${tree.symbol.showLocated} should not have survived, - |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" - val message = annot.argumentConstant(0).fold(defaultMsg)(_.stringValue) + val message = annot.argumentConstant(0) match + case Some(c) => + c.stringValue.toMessage + case _ => + em"""Reference to ${tree.symbol.showLocated} should not have survived, + |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" report.error(message, tree.srcPos) case _ => // OK - } - } + checkNotErasedClass(tree) - } + end checkNotErased private def checkNotErasedClass(tp: Type, tree: untpd.Tree)(using Context): Unit = tp match case JavaArrayType(et) => @@ -614,7 +616,7 @@ object Erasure { * are handled separately by [[typedDefDef]], [[typedValDef]] and [[typedTyped]]. 
*/ override def typedTypeTree(tree: untpd.TypeTree, pt: Type)(using Context): TypeTree = - checkNotErasedClass(tree.withType(erasure(tree.tpe))) + checkNotErasedClass(tree.withType(erasure(tree.typeOpt))) /** This override is only needed to semi-erase type ascriptions */ override def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree = @@ -677,6 +679,8 @@ object Erasure { val qualTp = tree.qualifier.typeOpt.widen if qualTp.derivesFrom(defn.PolyFunctionClass) then erasePolyFunctionApply(qualTp.select(nme.apply).widen).classSymbol + else if defn.isErasedFunctionType(qualTp) then + eraseErasedFunctionApply(qualTp.select(nme.apply).widen.asInstanceOf[MethodType]).classSymbol else NoSymbol } @@ -696,18 +700,20 @@ object Erasure { return tree.asInstanceOf[Tree] // we are re-typing a primitive array op val owner = mapOwner(origSym) - var sym = if (owner eq origSym.maybeOwner) origSym else owner.info.decl(tree.name).symbol - if !sym.exists then - // We fail the sym.exists test for pos/i15158.scala, where we pass an infinitely - // recurring match type to an overloaded constructor. An equivalent test - // with regular apply methods succeeds. It's at present unclear whether - // - the program should be rejected, or - // - there is another fix. - // Therefore, we apply the fix to use the pre-erasure symbol, but only - // for constructors, in order not to mask other possible bugs that would - // trigger the assert(sym.exists, ...) below. - val prevSym = tree.symbol(using preErasureCtx) - if prevSym.isConstructor then sym = prevSym + val sym = + (if (owner eq origSym.maybeOwner) origSym else owner.info.decl(tree.name).symbol) + .orElse { + // We fail the sym.exists test for pos/i15158.scala, where we pass an infinitely + // recurring match type to an overloaded constructor. An equivalent test + // with regular apply methods succeeds. It's at present unclear whether + // - the program should be rejected, or + // - there is another fix. 
+ // Therefore, we apply the fix to use the pre-erasure symbol, but only + // for constructors, in order not to mask other possible bugs that would + // trigger the assert(sym.exists, ...) below. + val prevSym = tree.symbol(using preErasureCtx) + if prevSym.isConstructor then prevSym else NoSymbol + } assert(sym.exists, i"no owner from $owner/${origSym.showLocated} in $tree") @@ -770,7 +776,7 @@ object Erasure { select(qual1, sym) else val castTarget = // Avoid inaccessible cast targets, see i8661 - if isJvmAccessible(sym.owner) + if isJvmAccessible(sym.owner) && sym.owner.isType then sym.owner.typeRef else @@ -780,7 +786,7 @@ object Erasure { val tp = originalQual if tp =:= qual1.tpe.widen then return errorTree(qual1, - ex"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") + em"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") tp recur(cast(qual1, castTarget)) } @@ -823,7 +829,10 @@ object Erasure { val Apply(fun, args) = tree val origFun = fun.asInstanceOf[tpd.Tree] val origFunType = origFun.tpe.widen(using preErasureCtx) - val ownArgs = if origFunType.isErasedMethod then Nil else args + val ownArgs = origFunType match + case mt: MethodType if mt.hasErasedParams => + args.zip(mt.erasedParams).collect { case (arg, false) => arg } + case _ => args val fun1 = typedExpr(fun, AnyFunctionProto) fun1.tpe.widen match case mt: MethodType => diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index cd6753eaed69..0bfc444e0997 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -145,15 +145,13 @@ class ExpandSAMs extends MiniPhase: def translateMatch(tree: Match, pfParam: Symbol, cases: List[CaseDef], defaultValue: Tree)(using Context) = { val selector = tree.selector - val selectorTpe = 
selector.tpe.widen - val defaultSym = newSymbol(pfParam.owner, nme.WILDCARD, SyntheticCase, selectorTpe) - val defaultCase = - CaseDef( - Bind(defaultSym, Underscore(selectorTpe)), - EmptyTree, - defaultValue) - val unchecked = selector.annotated(New(ref(defn.UncheckedAnnot.typeRef))) - cpy.Match(tree)(unchecked, cases :+ defaultCase) + val cases1 = if cases.exists(isDefaultCase) then cases + else + val selectorTpe = selector.tpe.widen + val defaultSym = newSymbol(pfParam.owner, nme.WILDCARD, SyntheticCase, selectorTpe) + val defaultCase = CaseDef(Bind(defaultSym, Underscore(selectorTpe)), EmptyTree, defaultValue) + cases :+ defaultCase + cpy.Match(tree)(selector, cases1) .subst(param.symbol :: Nil, pfParam :: Nil) // Needed because a partial function can be written as: // param => param match { case "foo" if foo(param) => param } @@ -186,7 +184,7 @@ class ExpandSAMs extends MiniPhase: private def checkRefinements(tpe: Type, tree: Tree)(using Context): Type = tpe.dealias match { case RefinedType(parent, name, _) => if (name.isTermName && tpe.member(name).symbol.ownersIterator.isEmpty) // if member defined in the refinement - report.error("Lambda does not define " + name, tree.srcPos) + report.error(em"Lambda does not define $name", tree.srcPos) checkRefinements(parent, tree) case tpe => tpe diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index ed3bfc7c0181..cddfe51275c8 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -72,9 +72,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => override def transformTemplate(impl: Template)(using Context): Tree = { val cls = ctx.owner.asClass val isTrait = cls.is(Trait) - if (needsOuterIfReferenced(cls) && - !needsOuterAlways(cls) && - impl.existsSubTree(referencesOuter(cls, _))) + if needsOuterIfReferenced(cls) && 
!needsOuterAlways(cls) && referencesOuter(cls, impl) then ensureOuterAccessors(cls) val clsHasOuter = hasOuter(cls) @@ -178,8 +176,9 @@ object ExplicitOuter { if prefix == NoPrefix then outerCls.typeRef.appliedTo(outerCls.typeParams.map(_ => TypeBounds.empty)) else prefix.widen) val info = if (flags.is(Method)) ExprType(target) else target + val currentNestingLevel = ctx.nestingLevel atPhaseNoEarlier(explicitOuterPhase.next) { // outer accessors are entered at explicitOuter + 1, should not be defined before. - newSymbol(owner, name, SyntheticArtifact | flags, info, coord = cls.coord) + newSymbol(owner, name, SyntheticArtifact | flags, info, coord = cls.coord, nestingLevel = currentNestingLevel) } } @@ -255,55 +254,83 @@ object ExplicitOuter { /** Tree references an outer class of `cls` which is not a static owner. */ - def referencesOuter(cls: Symbol, tree: Tree)(using Context): Boolean = { - def isOuterSym(sym: Symbol) = - !sym.isStaticOwner && cls.isProperlyContainedIn(sym) - def isOuterRef(ref: Type): Boolean = ref match { - case ref: ThisType => - isOuterSym(ref.cls) - case ref: TermRef => - if (ref.prefix ne NoPrefix) - !ref.symbol.isStatic && isOuterRef(ref.prefix) - else ( - ref.symbol.isOneOf(HoistableFlags) && - // ref.symbol will be placed in enclosing class scope by LambdaLift, so it might need - // an outer path then. - isOuterSym(ref.symbol.owner.enclosingClass) - || - // If not hoistable, ref.symbol will get a proxy in immediately enclosing class. If this properly - // contains the current class, it needs an outer path. - // If the symbol is hoistable, it might have free variables for which the same - // reasoning applies. 
See pos/i1664.scala - ctx.owner.enclosingClass.owner.enclosingClass.isContainedIn(ref.symbol.owner) - ) - case _ => false - } - def hasOuterPrefix(tp: Type): Boolean = tp.stripped match { - case AppliedType(tycon, _) => hasOuterPrefix(tycon) - case TypeRef(prefix, _) => isOuterRef(prefix) - case _ => false - } - def containsOuterRefs(tp: Type): Boolean = tp match - case tp: SingletonType => isOuterRef(tp) - case tp: AndOrType => containsOuterRefs(tp.tp1) || containsOuterRefs(tp.tp2) - case _ => false - tree match { - case _: This | _: Ident => isOuterRef(tree.tpe) - case nw: New => - val newCls = nw.tpe.classSymbol - isOuterSym(newCls.owner.enclosingClass) || - hasOuterPrefix(nw.tpe) || - newCls.owner.isTerm && cls.isProperlyContainedIn(newCls) - // newCls might get proxies for free variables. If current class is - // properly contained in newCls, it needs an outer path to newCls access the - // proxies and forward them to the new instance. - case app: TypeApply if app.symbol.isTypeTest => - // Type tests of singletons translate to `eq` tests with references, which might require outer pointers - containsOuterRefs(app.args.head.tpe) - case _ => - false - } - } + def referencesOuter(cls: Symbol, tree: Tree)(using Context): Boolean = + + val test = new TreeAccumulator[Boolean]: + private var inInline = false + + def isOuterSym(sym: Symbol) = + !sym.isStaticOwner && cls.isProperlyContainedIn(sym) + + def isOuterRef(ref: Type): Boolean = ref match + case ref: ThisType => + isOuterSym(ref.cls) + case ref: TermRef => + if (ref.prefix ne NoPrefix) + !ref.symbol.isStatic && isOuterRef(ref.prefix) + else ( + ref.symbol.isOneOf(HoistableFlags) && + // ref.symbol will be placed in enclosing class scope by LambdaLift, so it might need + // an outer path then. + isOuterSym(ref.symbol.owner.enclosingClass) + || + // If not hoistable, ref.symbol will get a proxy in immediately enclosing class. If this properly + // contains the current class, it needs an outer path. 
+ // If the symbol is hoistable, it might have free variables for which the same + // reasoning applies. See pos/i1664.scala + ctx.owner.enclosingClass.owner.enclosingClass.isContainedIn(ref.symbol.owner) + ) + case _ => false + + def hasOuterPrefix(tp: Type): Boolean = tp.stripped match + case AppliedType(tycon, _) => hasOuterPrefix(tycon) + case TypeRef(prefix, _) => isOuterRef(prefix) + case _ => false + + def containsOuterRefsAtTopLevel(tp: Type): Boolean = tp match + case tp: SingletonType => isOuterRef(tp) + case tp: AndOrType => containsOuterRefsAtTopLevel(tp.tp1) || containsOuterRefsAtTopLevel(tp.tp2) + case _ => false + + def containsOuterRefsAnywhere(tp: Type): Boolean = + tp.existsPart({ + case t: SingletonType => isOuterRef(t) + case _ => false + }, StopAt.Static) + + def containsOuterRefs(t: Tree): Boolean = t match + case _: This | _: Ident => isOuterRef(t.tpe) + case nw: New => + val newType = nw.tpe.dealias + val newCls = newType.classSymbol + isOuterSym(newCls.owner.enclosingClass) || + hasOuterPrefix(newType) || + newCls.owner.isTerm && cls.isProperlyContainedIn(newCls) + // newCls might get proxies for free variables. If current class is + // properly contained in newCls, it needs an outer path to newCls access the + // proxies and forward them to the new instance. 
+ case app: TypeApply if app.symbol.isTypeTest => + // Type tests of singletons translate to `eq` tests with references, which might require outer pointers + containsOuterRefsAtTopLevel(app.args.head.tpe.dealias) + case t: TypeTree if inInline => + // Expansions of inline methods must be able to address outer types + containsOuterRefsAnywhere(t.tpe.dealias) + case _ => + false + + def apply(x: Boolean, t: Tree)(using Context) = + if x || containsOuterRefs(t) then true + else t match + case t: DefDef if t.symbol.isInlineMethod => + val saved = inInline + inInline = true + try foldOver(x, t) + finally inInline = saved + case _ => + foldOver(x, t) + + test(false, tree) + end referencesOuter private final val HoistableFlags = Method | Lazy | Module diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index 9c580235a2e4..a430f7532066 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -13,7 +13,7 @@ import core._ import Types._, Contexts._, Names._, Flags._, DenotTransformers._, Phases._ import SymDenotations._, Symbols._, StdNames._, Denotations._ import TypeErasure.{ valueErasure, ErasedValueType } -import NameKinds.ExtMethName +import NameKinds.{ExtMethName, BodyRetainerName} import Decorators._ import TypeUtils._ @@ -79,7 +79,7 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete // because it adds extension methods before pickling. if (!(valueClass.is(Scala2x))) for (decl <- valueClass.classInfo.decls) - if (isMethodWithExtension(decl)) + if isMethodWithExtension(decl) then enterInModuleClass(createExtensionMethod(decl, moduleClassSym.symbol)) // Create synthetic methods to cast values between the underlying type @@ -179,7 +179,10 @@ object ExtensionMethods { /** Name of the extension method that corresponds to given instance method `meth`. 
*/ def extensionName(imeth: Symbol)(using Context): TermName = - ExtMethName(imeth.name.asTermName) + ExtMethName( + imeth.name.asTermName match + case BodyRetainerName(name) => name + case name => name) /** Return the extension method that corresponds to given instance method `meth`. */ def extensionMethod(imeth: Symbol)(using Context): TermSymbol = @@ -188,9 +191,17 @@ object ExtensionMethods { val companion = imeth.owner.companionModule val companionInfo = companion.info val candidates = companionInfo.decl(extensionName(imeth)).alternatives - val matching = - // See the documentation of `memberSignature` to understand why `.stripPoly.ensureMethodic` is needed here. - candidates filter (c => FullParameterization.memberSignature(c.info) == imeth.info.stripPoly.ensureMethodic.signature) + def matches(candidate: SingleDenotation) = + FullParameterization.memberSignature(candidate.info) == imeth.info.stripPoly.ensureMethodic.signature + // See the documentation of `memberSignature` to understand why `.stripPoly.ensureMethodic` is needed here. 
+ && (if imeth.targetName == imeth.name then + // imeth does not have a @targetName annotation, candidate should not have one either + candidate.symbol.targetName == candidate.symbol.name + else + // imeth has a @targetName annotation, candidate's target name must match + imeth.targetName == candidate.symbol.targetName + ) + val matching = candidates.filter(matches) assert(matching.nonEmpty, i"""no extension method found for: | @@ -203,6 +214,9 @@ object ExtensionMethods { | Candidates (signatures normalized): | | ${candidates.map(c => s"${c.name}:${c.info.signature}:${FullParameterization.memberSignature(c.info)}").mkString("\n")}""") + if matching.tail.nonEmpty then + // this case will report a "have the same erasure" error later at erasure pahse + report.log(i"mutiple extension methods match $imeth: ${candidates.map(c => i"${c.name}:${c.info}")}") matching.head.symbol.asTerm } } diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index 6968eb271961..a7e0795ce195 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -17,6 +17,7 @@ import NameOps._ import NameKinds.OuterSelectName import StdNames._ import TypeUtils.isErasedValueType +import config.Feature object FirstTransform { val name: String = "firstTransform" @@ -102,7 +103,7 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => } /** Eliminate self in Template - * Under -Ycc, we keep the self type `S` around in a type definition + * Under captureChecking, we keep the self type `S` around in a type definition * * private[this] type $this = S * @@ -110,7 +111,7 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => */ override def transformTemplate(impl: Template)(using Context): Tree = impl.self match - case self: ValDef if !self.tpt.isEmpty && ctx.settings.Ycc.value => + case self: ValDef 
if !self.tpt.isEmpty && Feature.ccEnabled => val tsym = newSymbol(ctx.owner, tpnme.SELF, PrivateLocal, TypeAlias(self.tpt.tpe)) val tdef = untpd.cpy.TypeDef(self)(tpnme.SELF, self.tpt).withType(tsym.typeRef) cpy.Template(impl)(self = EmptyValDef, body = tdef :: impl.body) diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index 9a6ab233e239..a1baeac272b9 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -5,16 +5,19 @@ package transform import core.Annotations._ import core.Contexts._ import core.Phases._ +import core.Decorators.* import core.Definitions import core.Flags._ import core.Names.Name import core.Symbols._ import core.TypeApplications.{EtaExpansion, TypeParamInfo} -import core.TypeErasure.{erasedGlb, erasure, isGenericArrayElement} +import core.TypeErasure.{erasedGlb, erasure, fullErasure, isGenericArrayElement} import core.Types._ import core.classfile.ClassfileConstants import SymUtils._ import TypeUtils._ +import config.Printers.transforms +import reporting.trace import java.lang.StringBuilder import scala.collection.mutable.ListBuffer @@ -130,12 +133,12 @@ object GenericSignatures { else Right(parent)) - def paramSig(param: LambdaParam): Unit = { - builder.append(sanitizeName(param.paramName)) + def paramSig(param: TypeParamInfo): Unit = { + builder.append(sanitizeName(param.paramName.lastPart)) boundsSig(hiBounds(param.paramInfo.bounds)) } - def polyParamSig(tparams: List[LambdaParam]): Unit = + def polyParamSig(tparams: List[TypeParamInfo]): Unit = if (tparams.nonEmpty) { builder.append('<') tparams.foreach(paramSig) @@ -236,7 +239,11 @@ object GenericSignatures { tp match { case ref @ TypeParamRef(_: PolyType, _) => - typeParamSig(ref.paramName.lastPart) + val erasedUnderlying = fullErasure(ref.underlying.bounds.hi) + // don't emit type param name if the 
param is upper-bounded by a primitive type (including via a value class) + if erasedUnderlying.isPrimitiveValueType then + jsig(erasedUnderlying, toplevel, primitiveOK) + else typeParamSig(ref.paramName.lastPart) case defn.ArrayOf(elemtp) => if (isGenericArrayElement(elemtp, isScala2 = false)) @@ -267,7 +274,7 @@ object GenericSignatures { else if (sym == defn.UnitClass) jsig(defn.BoxedUnitClass.typeRef) else builder.append(defn.typeTag(sym.info)) else if (ValueClasses.isDerivedValueClass(sym)) { - val erasedUnderlying = core.TypeErasure.fullErasure(tp) + val erasedUnderlying = fullErasure(tp) if (erasedUnderlying.isPrimitiveValueType && !primitiveOK) classSig(sym, pre, args) else @@ -304,7 +311,9 @@ object GenericSignatures { case mtpe: MethodType => // erased method parameters do not make it to the bytecode. def effectiveParamInfoss(t: Type)(using Context): List[List[Type]] = t match { - case t: MethodType if t.isErasedMethod => effectiveParamInfoss(t.resType) + case t: MethodType if t.hasErasedParams => + t.paramInfos.zip(t.erasedParams).collect{ case (i, false) => i } + :: effectiveParamInfoss(t.resType) case t: MethodType => t.paramInfos :: effectiveParamInfoss(t.resType) case _ => Nil } @@ -334,15 +343,6 @@ object GenericSignatures { jsig(repr, primitiveOK = primitiveOK) case ci: ClassInfo => - def polyParamSig(tparams: List[TypeParamInfo]): Unit = - if (tparams.nonEmpty) { - builder.append('<') - tparams.foreach { tp => - builder.append(sanitizeName(tp.paramName.lastPart)) - boundsSig(hiBounds(tp.paramInfo.bounds)) - } - builder.append('>') - } val tParams = tp.typeParams if (toplevel) polyParamSig(tParams) superSig(ci.typeSymbol, ci.parents) diff --git a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala index edbfbd1552c4..9a36d65babe8 100644 --- a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala +++ b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala @@ -13,6 
+13,7 @@ import collection.mutable import ast.Trees._ import core.NameKinds.SuperArgName import SymUtils._ +import core.Decorators.* object HoistSuperArgs { val name: String = "hoistSuperArgs" @@ -181,7 +182,9 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase /** Hoist complex arguments in super call out of the class. */ def hoistSuperArgsFromCall(superCall: Tree, cdef: DefDef, lifted: mutable.ListBuffer[Symbol]): Tree = superCall match - case Block(defs, expr) => + case Block(defs, expr) if !expr.symbol.owner.is(Scala2x) => + // MO: The guard avoids the crash for #16351. + // It would be good to dig deeper, but I won't have the time myself to do it. cpy.Block(superCall)( stats = defs.mapconserve { case vdef: ValDef => diff --git a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala index 6edb60a77245..798f34757b35 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala @@ -8,6 +8,8 @@ import Symbols._, Contexts._, Types._, Decorators._ import NameOps._ import Names._ +import scala.collection.mutable.ListBuffer + /** Rewrite an application * * {new { def unapply(x0: X0)(x1: X1,..., xn: Xn) = b }}.unapply(y0)(y1, ..., yn) @@ -38,7 +40,7 @@ class InlinePatterns extends MiniPhase: if app.symbol.name.isUnapplyName && !app.tpe.isInstanceOf[MethodicType] then app match case App(Select(fn, name), argss) => - val app1 = betaReduce(app, fn, name, argss.flatten) + val app1 = betaReduce(app, fn, name, argss) if app1 ne app then report.log(i"beta reduce $app -> $app1") app1 case _ => @@ -51,11 +53,16 @@ class InlinePatterns extends MiniPhase: case Apply(App(fn, argss), args) => (fn, argss :+ args) case _ => (app, Nil) - private def betaReduce(tree: Apply, fn: Tree, name: Name, args: List[Tree])(using Context): Tree = + // TODO merge with BetaReduce.scala + private def betaReduce(tree: 
Apply, fn: Tree, name: Name, argss: List[List[Tree]])(using Context): Tree = fn match case Block(TypeDef(_, template: Template) :: Nil, Apply(Select(New(_),_), Nil)) if template.constr.rhs.isEmpty => template.body match - case List(ddef @ DefDef(`name`, _, _, _)) => BetaReduce(ddef, args) + case List(ddef @ DefDef(`name`, _, _, _)) => + val bindings = new ListBuffer[DefTree]() + val expansion1 = BetaReduce.reduceApplication(ddef, argss, bindings) + val bindings1 = bindings.result() + seq(bindings1, expansion1) case _ => tree case _ => tree diff --git a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala index 65212ec2c0cc..047a187bad68 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala @@ -38,8 +38,8 @@ class InlineVals extends MiniPhase: tpt.tpe.widenTermRefExpr.dealiasKeepOpaques.normalized match case tp: ConstantType => if !isPureExpr(rhs) then - val details = if enclosingInlineds.isEmpty then "" else em"but was: $rhs" - report.error(s"inline value must be pure$details", rhs.srcPos) + def details = if enclosingInlineds.isEmpty then "" else i"but was: $rhs" + report.error(em"inline value must be pure$details", rhs.srcPos) case tp => if tp.typeSymbol.is(Opaque) then report.error(em"The type of an `inline val` cannot be an opaque type.\n\nTo inline, consider using `inline def` instead", rhs) diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 5ddcf600c63a..d6b7f3141b96 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -7,14 +7,19 @@ import Contexts._ import Symbols._ import SymUtils._ import dotty.tools.dotc.ast.tpd - -import dotty.tools.dotc.core.StagingContext._ +import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.quoted._ import 
dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.ast.TreeMapWithImplicits +import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer +import dotty.tools.dotc.staging.QuoteContext.* +import dotty.tools.dotc.staging.StagingLevel +import scala.collection.mutable.ListBuffer /** Inlines all calls to inline methods that are not in an inline method or a quote */ class Inlining extends MacroTransform { + import tpd._ override def phaseName: String = Inlining.name @@ -23,8 +28,10 @@ class Inlining extends MacroTransform { override def allowsImplicitSearch: Boolean = true + override def changesMembers: Boolean = true + override def run(using Context): Unit = - if ctx.compilationUnit.needsInlining then + if ctx.compilationUnit.needsInlining || ctx.compilationUnit.hasMacroAnnotations then try super.run catch case _: CompilationUnit.SuspendException => () @@ -40,10 +47,10 @@ class Inlining extends MacroTransform { def traverse(tree: Tree)(using Context): Unit = tree match case _: GenericApply if tree.symbol.isQuote => - traverseChildren(tree)(using StagingContext.quoteContext) + traverseChildren(tree)(using StagingLevel.quoteContext) case _: GenericApply if tree.symbol.isExprSplice => - traverseChildren(tree)(using StagingContext.spliceContext) - case tree: RefTree if !Inlines.inInlineMethod && StagingContext.level == 0 => + traverseChildren(tree)(using StagingLevel.spliceContext) + case tree: RefTree if !Inlines.inInlineMethod && StagingLevel.level == 0 => assert(!tree.symbol.isInlineMethod, tree.show) case _ => traverseChildren(tree) @@ -57,10 +64,33 @@ class Inlining extends MacroTransform { } private class InliningTreeMap extends TreeMapWithImplicits { + + /** List of top level classes added by macro annotation in a package object. + * These are added to the PackageDef that owns this particular package object. 
+ */ + private val newTopClasses = MutableSymbolMap[ListBuffer[Tree]]() + override def transform(tree: Tree)(using Context): Tree = { tree match - case tree: DefTree => + case tree: MemberDef => if tree.symbol.is(Inline) then tree + else if tree.symbol.is(Param) then super.transform(tree) + else if + !tree.symbol.isPrimaryConstructor + && StagingLevel.level == 0 + && MacroAnnotations.hasMacroAnnotation(tree.symbol) + then + val trees = (new MacroAnnotations).expandAnnotations(tree) + val trees1 = trees.map(super.transform) + + // Find classes added to the top level from a package object + val (topClasses, trees2) = + if ctx.owner.isPackageObject then trees1.partition(_.symbol.owner == ctx.owner.owner) + else (Nil, trees1) + if topClasses.nonEmpty then + newTopClasses.getOrElseUpdate(ctx.owner.owner, new ListBuffer) ++= topClasses + + flatTree(trees2) else super.transform(tree) case _: Typed | _: Block => super.transform(tree) @@ -69,9 +99,19 @@ class Inlining extends MacroTransform { if tree1.tpe.isError then tree1 else Inlines.inlineCall(tree1) case _: GenericApply if tree.symbol.isQuote => - super.transform(tree)(using StagingContext.quoteContext) + super.transform(tree)(using StagingLevel.quoteContext) case _: GenericApply if tree.symbol.isExprSplice => - super.transform(tree)(using StagingContext.spliceContext) + super.transform(tree)(using StagingLevel.spliceContext) + case _: PackageDef => + super.transform(tree) match + case tree1: PackageDef => + newTopClasses.get(tree.symbol.moduleClass) match + case Some(topClasses) => + newTopClasses.remove(tree.symbol.moduleClass) + val newStats = tree1.stats ::: topClasses.result() + cpy.PackageDef(tree1)(tree1.pid, newStats) + case _ => tree1 + case tree1 => tree1 case _ => super.transform(tree) } diff --git a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala index 1cc1340ad7c7..29572a4ae30d 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala +++ b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala @@ -11,18 +11,21 @@ import core.DenotTransformers.IdentityDenotTransformer import core.Symbols.{defn, Symbol} import core.Constants.Constant import core.NameOps.isContextFunction +import core.StdNames.nme import core.Types.* import coverage.* import typer.LiftCoverage -import util.SourcePosition +import util.{SourcePosition, SourceFile} import util.Spans.Span import localopt.StringInterpolatorOpt +import inlines.Inlines /** Implements code coverage by inserting calls to scala.runtime.coverage.Invoker * ("instruments" the source code). * The result can then be consumed by the Scoverage tool. */ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: + import InstrumentCoverage.{InstrumentedParts, ExcludeMethodFlags} override def phaseName = InstrumentCoverage.name @@ -55,186 +58,339 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: Serializer.serialize(coverage, outputPath, ctx.settings.sourceroot.value) - override protected def newTransformer(using Context) = CoverageTransformer() + override protected def newTransformer(using Context) = + CoverageTransformer(ctx.settings.coverageOutputDir.value) /** Transforms trees to insert calls to Invoker.invoked to compute the coverage when the code is called */ - private class CoverageTransformer extends Transformer: + private class CoverageTransformer(outputPath: String) extends Transformer: + private val ConstOutputPath = Constant(outputPath) + + /** Generates the tree for: + * ``` + * Invoker.invoked(id, DIR) + * ``` + * where DIR is the _outputPath_ defined by the coverage settings. 
+ */ + private def invokeCall(id: Int, span: Span)(using Context): Apply = + ref(defn.InvokedMethodRef).withSpan(span) + .appliedToArgs( + Literal(Constant(id)) :: Literal(ConstOutputPath) :: Nil + ).withSpan(span) + .asInstanceOf[Apply] + + /** + * Records information about a new coverable statement. Generates a unique id for it. + * + * @param tree the tree to add to the coverage report + * @param pos the position to save in the report + * @param branch true if it's a branch (branches are considered differently by most coverage analysis tools) + * @param ctx the current context + * @return the statement's id + */ + private def recordStatement(tree: Tree, pos: SourcePosition, branch: Boolean)(using ctx: Context): Int = + val id = statementId + statementId += 1 + + val sourceFile = pos.source + val statement = Statement( + location = Location(tree, sourceFile), + id = id, + start = pos.start, + end = pos.end, + line = pos.line, + desc = sourceFile.content.slice(pos.start, pos.end).mkString, + symbolName = tree.symbol.name.toSimpleName.toString, + treeName = tree.getClass.getSimpleName.nn, + branch + ) + coverage.addStatement(statement) + id + + /** + * Adds a new statement to the current `Coverage` and creates a corresponding call + * to `Invoker.invoke` with its id, and the given position. + * + * Note that the entire tree won't be saved in the coverage analysis, only some + * data related to the tree is recorded (e.g. its type, its parent class, ...). + * + * @param tree the tree to add to the coverage report + * @param pos the position to save in the report + * @param branch true if it's a branch + * @return the tree corresponding to the call to `Invoker.invoke` + */ + private def createInvokeCall(tree: Tree, pos: SourcePosition, branch: Boolean = false)(using Context): Apply = + val statementId = recordStatement(tree, pos, branch) + val span = pos.span.toSynthetic + invokeCall(statementId, span) + + /** + * Tries to instrument an `Apply`. 
+ * These "tryInstrument" methods are useful to tweak the generation of coverage instrumentation, + * in particular in `case TypeApply` in the [[transform]] method. + * + * @param tree the tree to instrument + * @return instrumentation result, with the preparation statement, coverage call and tree separated + */ + private def tryInstrument(tree: Apply)(using Context): InstrumentedParts = + if canInstrumentApply(tree) then + // Create a call to Invoker.invoked(coverageDirectory, newStatementId) + val coverageCall = createInvokeCall(tree, tree.sourcePos) + + if needsLift(tree) then + // Transform args and fun, i.e. instrument them if needed (and if possible) + val app = cpy.Apply(tree)(transform(tree.fun), tree.args.map(transform)) + + // Lifts the arguments. Note that if only one argument needs to be lifted, we lift them all. + // Also, tree.fun can be lifted too. + // See LiftCoverage for the internal working of this lifting. + val liftedDefs = mutable.ListBuffer[Tree]() + val liftedApp = LiftCoverage.liftForCoverage(liftedDefs, app) + + InstrumentedParts(liftedDefs.toList, coverageCall, liftedApp) + else + // Instrument without lifting + val transformed = cpy.Apply(tree)(transform(tree.fun), transform(tree.args)) + InstrumentedParts.singleExpr(coverageCall, transformed) + else + // Transform recursively but don't instrument the tree itself + val transformed = cpy.Apply(tree)(transform(tree.fun), transform(tree.args)) + InstrumentedParts.notCovered(transformed) + + private def tryInstrument(tree: Ident)(using Context): InstrumentedParts = + val sym = tree.symbol + if canInstrumentParameterless(sym) then + // call to a local parameterless method f + val coverageCall = createInvokeCall(tree, tree.sourcePos) + InstrumentedParts.singleExpr(coverageCall, tree) + else + InstrumentedParts.notCovered(tree) + + private def tryInstrument(tree: Select)(using Context): InstrumentedParts = + val sym = tree.symbol + val transformed = cpy.Select(tree)(transform(tree.qualifier), 
tree.name) + if canInstrumentParameterless(sym) then + // call to a parameterless method + val coverageCall = createInvokeCall(tree, tree.sourcePos) + InstrumentedParts.singleExpr(coverageCall, transformed) + else + InstrumentedParts.notCovered(transformed) + + /** Generic tryInstrument */ + private def tryInstrument(tree: Tree)(using Context): InstrumentedParts = + tree match + case t: Apply => tryInstrument(t) + case t: Ident => tryInstrument(t) + case t: Select => tryInstrument(t) + case _ => InstrumentedParts.notCovered(transform(tree)) + + /** + * Transforms and instruments a branch if it's non-empty. + * If the tree is empty, return itself and don't instrument. + */ + private def transformBranch(tree: Tree)(using Context): Tree = + import dotty.tools.dotc.core.Decorators.{show,i} + if tree.isEmpty || tree.span.isSynthetic then + // - If t.isEmpty then `transform(t) == t` always hold, + // so we can avoid calling transform in that case. + // - If tree.span.isSynthetic then the branch has been generated + // by the frontend phases, so we don't want to instrument it. + tree + else + val transformed = transform(tree) + val coverageCall = createInvokeCall(tree, tree.sourcePos, branch = true) + InstrumentedParts.singleExprTree(coverageCall, transformed) + override def transform(tree: Tree)(using Context): Tree = inContext(transformCtx(tree)) { // necessary to position inlined code properly tree match // simple cases case tree: (Import | Export | Literal | This | Super | New) => tree case tree if tree.isEmpty || tree.isType => tree // empty Thicket, Ident (referring to a type), TypeTree, ... 
+ case tree if !tree.span.exists || tree.span.isZeroExtent => tree // no meaningful position // identifier case tree: Ident => - val sym = tree.symbol - if canInstrumentParameterless(sym) then - // call to a local parameterless method f - instrument(tree) - else - tree + tryInstrument(tree).toTree // branches case tree: If => cpy.If(tree)( cond = transform(tree.cond), - thenp = instrument(transform(tree.thenp), branch = true), - elsep = instrument(transform(tree.elsep), branch = true) + thenp = transformBranch(tree.thenp), + elsep = transformBranch(tree.elsep) ) case tree: Try => cpy.Try(tree)( - expr = instrument(transform(tree.expr), branch = true), - cases = instrumentCases(tree.cases), - finalizer = instrument(transform(tree.finalizer), branch = true) + expr = transformBranch(tree.expr), + cases = tree.cases.map(transformCaseDef), + finalizer = transformBranch(tree.finalizer) ) // f(args) case tree: Apply => - if canInstrumentApply(tree) then - if needsLift(tree) then - instrumentLifted(tree) - else - instrument(transformApply(tree)) - else - transformApply(tree) + tryInstrument(tree).toTree // (fun)[args] case TypeApply(fun, args) => - val tfun = transform(fun) - tfun match - case InstrumentCoverage.InstrumentedBlock(invokeCall, expr) => - // expr[T] shouldn't be transformed to - // {invoked(...), expr}[T] - // - // but to - // {invoked(...), expr[T]} - // - // This is especially important for trees like (expr[T])(args), - // for which the wrong transformation crashes the compiler. - // See tests/coverage/pos/PolymorphicExtensions.scala - Block( - invokeCall :: Nil, - cpy.TypeApply(tree)(expr, args) - ) - case _ => - cpy.TypeApply(tree)(tfun, args) + // Here is where `InstrumentedParts` becomes useful! + // We extract its components and act carefully. 
+ val InstrumentedParts(pre, coverageCall, expr) = tryInstrument(fun) - // a.b - case Select(qual, name) => - val transformed = cpy.Select(tree)(transform(qual), name) - val sym = tree.symbol - if canInstrumentParameterless(sym) then - // call to a parameterless method - instrument(transformed) + if coverageCall.isEmpty then + // `fun` cannot be instrumented, and `args` is a type so we keep this tree as it is + tree else - transformed + // expr[T] shouldn't be transformed to: + // {invoked(...), expr}[T] + // + // but to: + // {invoked(...), expr[T]} + // + // This is especially important for trees like (expr[T])(args), + // for which the wrong transformation crashes the compiler. + // See tests/coverage/pos/PolymorphicExtensions.scala + Block( + pre :+ coverageCall, + cpy.TypeApply(tree)(expr, args) + ) + + // a.b + case tree: Select => + tryInstrument(tree).toTree + + case tree: CaseDef => + transformCaseDef(tree) - case tree: CaseDef => instrumentCaseDef(tree) case tree: ValDef => // only transform the rhs val rhs = transform(tree.rhs) cpy.ValDef(tree)(rhs = rhs) case tree: DefDef => - if tree.symbol.isOneOf(Inline | Erased) then - // Inline and erased definitions will not be in the generated code and therefore do not need to be instrumented. - // Note that a retained inline method will have a `$retained` variant that will be instrumented. - tree - else - // Only transform the params (for the default values) and the rhs. - val paramss = transformParamss(tree.paramss) - val rhs = transform(tree.rhs) - val finalRhs = - if canInstrumentDefDef(tree) then - // Ensure that the rhs is always instrumented, if possible. - // This is useful because methods can be stored and called later, or called by reflection, - // and if the rhs is too simple to be instrumented (like `def f = this`), the method won't show up as covered. 
- instrumentBody(tree, rhs) - else - rhs - cpy.DefDef(tree)(tree.name, paramss, tree.tpt, finalRhs) - end if + transformDefDef(tree) + case tree: PackageDef => // only transform the statements of the package cpy.PackageDef(tree)(tree.pid, transform(tree.stats)) + case tree: Assign => // only transform the rhs cpy.Assign(tree)(tree.lhs, transform(tree.rhs)) + case tree: Return => + // only transform the expr, because `from` is a "pointer" + // to the enclosing method, not a tree to instrument. + cpy.Return(tree)(expr = transform(tree.expr), from = tree.from) + + case tree: Template => + // only transform: + // - the arguments of the `Apply` trees in the parents + // - the template body + cpy.Template(tree)( + transformSub(tree.constr), + transformTemplateParents(tree.parents)(using ctx.superCallContext), + tree.derived, + tree.self, + transformStats(tree.body, tree.symbol) + ) + + case tree: Inlined => + // Ideally, tree.call would provide precise information about the inlined call, + // and we would use this information for the coverage report. + // But PostTyper simplifies tree.call, so we can't report the actual method that was inlined. + // In any case, the subtrees need to be repositioned right now, otherwise the + // coverage statement will point to a potentially unreachable source file. + val dropped = Inlines.dropInlined(tree) // drop and reposition + transform(dropped) // transform the content of the Inlined + // For everything else just recurse and transform - // Special care for Templates: it's important to set the owner of the `stats`, like super.transform case _ => super.transform(tree) } - /** Lifts and instruments an application. - * Note that if only one arg needs to be lifted, we just lift everything (see LiftCoverage). + /** Transforms a `def lhs = rhs` and instruments its body (rhs). + * + * The rhs is always transformed recursively. 
+ * + * If possible, a coverage call is inserted at the beginning of the body + * (never outside of the DefDef tree). Therefore, this method always returns a `DefDef`. + * Thanks to this, it doesn't need to be wrapped in an`InstrumentedParts`. */ - private def instrumentLifted(tree: Apply)(using Context) = - // lifting - val buffer = mutable.ListBuffer[Tree]() - val liftedApply = LiftCoverage.liftForCoverage(buffer, tree) - - // instrumentation - val instrumentedArgs = buffer.toList.map(transform) - val instrumentedApply = instrument(liftedApply) - Block( - instrumentedArgs, - instrumentedApply - ) - - private inline def transformApply(tree: Apply)(using Context): Apply = - cpy.Apply(tree)(transform(tree.fun), transform(tree.args)) - - private inline def instrumentCases(cases: List[CaseDef])(using Context): List[CaseDef] = - cases.map(instrumentCaseDef) - - private def instrumentCaseDef(tree: CaseDef)(using Context): CaseDef = + private def transformDefDef(tree: DefDef)(using Context): DefDef = + val sym = tree.symbol + if sym.isOneOf(Inline | Erased) then + // Inline and erased definitions will not be in the generated code and therefore do not need to be instrumented. + // (Note that a retained inline method will have a `$retained` variant that will be instrumented.) + tree + else + // Only transform the params (for the default values) and the rhs, not the name and tpt. + val transformedParamss = transformParamss(tree.paramss) + val transformedRhs = + if tree.rhs.isEmpty then + tree.rhs + else if sym.isClassConstructor then + instrumentSecondaryCtor(tree) + else if !sym.isOneOf(Accessor | Artifact | Synthetic) then + // If the body can be instrumented, do it (i.e. insert a "coverage call" at the beginning) + // This is useful because methods can be stored and called later, or called by reflection, + // and if the rhs is too simple to be instrumented (like `def f = this`), + // the method won't show up as covered if we don't insert a call at its beginning. 
+ instrumentBody(tree, transform(tree.rhs)) + else + transform(tree.rhs) + + cpy.DefDef(tree)(tree.name, transformedParamss, tree.tpt, transformedRhs) + + /** Transforms a `case ...` and instruments the parts that can be. */ + private def transformCaseDef(tree: CaseDef)(using Context): CaseDef = val pat = tree.pat val guard = tree.guard + + // compute a span that makes sense for the user that will read the coverage results val friendlyEnd = if guard.span.exists then guard.span.end else pat.span.end val pos = tree.sourcePos.withSpan(tree.span.withEnd(friendlyEnd)) // user-friendly span - // ensure that the body is always instrumented by inserting a call to Invoker.invoked at its beginning - val instrumentedBody = instrument(transform(tree.body), pos, false) - cpy.CaseDef(tree)(tree.pat, transform(tree.guard), instrumentedBody) - - /** Records information about a new coverable statement. Generates a unique id for it. - * @return the statement's id - */ - private def recordStatement(tree: Tree, pos: SourcePosition, branch: Boolean)(using ctx: Context): Int = - val id = statementId - statementId += 1 - val statement = Statement( - source = ctx.source.file.name, - location = Location(tree), - id = id, - start = pos.start, - end = pos.end, - line = pos.line, - desc = tree.source.content.slice(pos.start, pos.end).mkString, - symbolName = tree.symbol.name.toSimpleName.toString, - treeName = tree.getClass.getSimpleName.nn, - branch - ) - coverage.addStatement(statement) - id - - private inline def syntheticSpan(pos: SourcePosition): Span = pos.span.toSynthetic - /** Shortcut for instrument(tree, tree.sourcePos, branch) */ - private inline def instrument(tree: Tree, branch: Boolean = false)(using Context): Tree = - instrument(tree, tree.sourcePos, branch) + // recursively transform the guard, but keep the pat + val transformedGuard = transform(guard) - /** Instruments a statement, if it has a position. 
*/ - private def instrument(tree: Tree, pos: SourcePosition, branch: Boolean)(using Context): Tree = - if pos.exists && !pos.span.isZeroExtent then - val statementId = recordStatement(tree, pos, branch) - insertInvokeCall(tree, pos, statementId) - else - tree - - /** Instruments the body of a DefDef. Handles corner cases. */ + // ensure that the body is always instrumented by inserting a call to Invoker.invoked at its beginning + val coverageCall = createInvokeCall(tree.body, pos) + val transformedBody = transform(tree.body) + val instrumentedBody = InstrumentedParts.singleExprTree(coverageCall, transformedBody) + + cpy.CaseDef(tree)(pat, transformedGuard, instrumentedBody) + + /** Transforms the parents of a Template. */ + private def transformTemplateParents(parents: List[Tree])(using Context): List[Tree] = + def transformParent(parent: Tree): Tree = parent match + case tree: Apply => + // only instrument the args, not the constructor call + cpy.Apply(tree)(tree.fun, tree.args.mapConserve(transform)) + case tree: TypeApply => + // args are types, instrument the fun with transformParent + cpy.TypeApply(tree)(transformParent(tree.fun), tree.args) + case other => + // should always be a TypeTree, nothing to instrument + other + + parents.mapConserve(transformParent) + + /** Instruments the body of a DefDef. Handles corner cases. + * Given a DefDef f like this: + * ``` + * def f(params) = rhs + * ``` + * + * It generally inserts a "coverage call" before rhs: + * ``` + * def f(params) = + * Invoker.invoked(id, DIR) + * rhs + * ``` + * + * But in some cases (e.g. closures), this would be invalid (see the comment below), + * and the call is inserted at another place. 
+ */ private def instrumentBody(parent: DefDef, body: Tree)(using Context): Tree = /* recurse on closures, so that we insert the call at the leaf: @@ -256,21 +412,26 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: val namePos = parent.namePos val pos = namePos.withSpan(namePos.span.withStart(parent.span.start)) // record info and insert call to Invoker.invoked - val statementId = recordStatement(parent, pos, false) - insertInvokeCall(body, pos, statementId) - - /** Returns the tree, prepended by a call to Invoker.invoked */ - private def insertInvokeCall(tree: Tree, pos: SourcePosition, statementId: Int)(using Context): Tree = - val callSpan = syntheticSpan(pos) - Block(invokeCall(statementId, callSpan) :: Nil, tree).withSpan(callSpan.union(tree.span)) + val coverageCall = createInvokeCall(parent, pos) + InstrumentedParts.singleExprTree(coverageCall, body) - /** Generates Invoker.invoked(id, DIR) */ - private def invokeCall(id: Int, span: Span)(using Context): Tree = - val outputPath = ctx.settings.coverageOutputDir.value - ref(defn.InvokedMethodRef).withSpan(span) - .appliedToArgs( - List(Literal(Constant(id)), Literal(Constant(outputPath))) - ).withSpan(span) + /** Instruments the body of a secondary constructor DefDef. + * + * We must preserve the delegate constructor call as the first statement of + * the rhs Block, otherwise `HoistSuperArgs` will not be happy (see #17042). 
+ */ + private def instrumentSecondaryCtor(ctorDef: DefDef)(using Context): Tree = + // compute position like in instrumentBody + val namePos = ctorDef.namePos + val pos = namePos.withSpan(namePos.span.withStart(ctorDef.span.start)) + val coverageCall = createInvokeCall(ctorDef, pos) + + ctorDef.rhs match + case b @ Block(delegateCtorCall :: stats, expr: Literal) => + cpy.Block(b)(transform(delegateCtorCall) :: coverageCall :: stats.mapConserve(transform), expr) + case rhs => + cpy.Block(rhs)(transform(rhs) :: coverageCall :: Nil, unitLiteral) + end instrumentSecondaryCtor /** * Checks if the apply needs a lift in the coverage phase. @@ -307,19 +468,17 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: nestedApplyNeedsLift || !isUnliftableFun(fun) && !tree.args.isEmpty && !tree.args.forall(LiftCoverage.noLift) - /** Check if the body of a DefDef can be instrumented with instrumentBody. */ - private def canInstrumentDefDef(tree: DefDef)(using Context): Boolean = - // No need to force the instrumentation of synthetic definitions - // (it would work, but it looks better without). - !tree.symbol.isOneOf(Accessor | Synthetic | Artifact) && - !tree.rhs.isEmpty - /** Check if an Apply can be instrumented. Prevents this phase from generating incorrect code. 
*/ private def canInstrumentApply(tree: Apply)(using Context): Boolean = + def isSecondaryCtorDelegateCall: Boolean = tree.fun match + case Select(This(_), nme.CONSTRUCTOR) => true + case _ => false + val sym = tree.symbol - !sym.isOneOf(Synthetic | Artifact) && // no need to instrument synthetic apply - !isCompilerIntrinsicMethod(sym) && - (tree.typeOpt match + !sym.isOneOf(ExcludeMethodFlags) + && !isCompilerIntrinsicMethod(sym) + && !(sym.isClassConstructor && isSecondaryCtorDelegateCall) + && (tree.typeOpt match case AppliedType(tycon: NamedType, _) => /* If the last expression in a block is a context function, we'll try to summon its arguments at the current point, even if the expected type @@ -351,9 +510,10 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: * in post-erasure checking. */ private def canInstrumentParameterless(sym: Symbol)(using Context): Boolean = - sym.is(Method, butNot = Synthetic | Artifact) && - sym.info.isParameterless && - !isCompilerIntrinsicMethod(sym) + sym.is(Method, butNot = ExcludeMethodFlags) + && sym.info.isParameterless + && !isCompilerIntrinsicMethod(sym) + && !sym.info.typeSymbol.name.isContextFunction // exclude context functions like in canInstrumentApply /** Does sym refer to a "compiler intrinsic" method, which only exist during compilation, * like Any.isInstanceOf? @@ -370,15 +530,27 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: object InstrumentCoverage: val name: String = "instrumentCoverage" val description: String = "instrument code for coverage checking" - - /** Extractor object for trees produced by `insertInvokeCall`. 
*/ - object InstrumentedBlock: - private def isInvokedCall(app: Apply)(using Context): Boolean = - app.span.isSynthetic && app.symbol == defn.InvokedMethodRef.symbol - - def unapply(t: Tree)(using Context): Option[(Apply, Tree)] = - t match - case Block((app: Apply) :: Nil, expr) if isInvokedCall(app) => - Some((app, expr)) - case _ => - None + val ExcludeMethodFlags: FlagSet = Synthetic | Artifact | Erased + + /** + * An instrumented Tree, in 3 parts. + * @param pre preparation code, e.g. lifted arguments. May be empty. + * @param invokeCall call to Invoker.invoked(dir, id), or an empty tree. + * @param expr the instrumented expression, executed just after the invokeCall + */ + case class InstrumentedParts(pre: List[Tree], invokeCall: Apply | EmptyTree.type, expr: Tree): + require(pre.isEmpty || (pre.nonEmpty && !invokeCall.isEmpty), "if pre isn't empty then invokeCall shouldn't be empty") + + /** Turns this into an actual Tree. */ + def toTree(using Context): Tree = + if invokeCall.isEmpty then expr + else if pre.isEmpty then Block(invokeCall :: Nil, expr) + else Block(pre :+ invokeCall, expr) + + object InstrumentedParts: + def notCovered(expr: Tree) = InstrumentedParts(Nil, EmptyTree, expr) + def singleExpr(invokeCall: Apply, expr: Tree) = InstrumentedParts(Nil, invokeCall, expr) + + /** Shortcut for `singleExpr(call, expr).toTree` */ + def singleExprTree(invokeCall: Apply, expr: Tree)(using Context): Tree = + Block(invokeCall :: Nil, expr) diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala index ad068b84c041..c95500d856be 100644 --- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala @@ -65,7 +65,7 @@ class InterceptedMethods extends MiniPhase { override def transformApply(tree: Apply)(using Context): Tree = { lazy val qual = tree.fun match { case Select(qual, _) => qual - case ident 
@ Ident(_) => + case ident: Ident => ident.tpe match { case TermRef(prefix: TermRef, _) => tpd.ref(prefix) diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index c32ea61cff2b..b433e37e39c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -2,30 +2,33 @@ package dotty.tools.dotc package transform import java.util.IdentityHashMap - import ast.tpd import core.Annotations.Annotation import core.Constants.Constant -import core.Contexts._ -import core.Decorators._ +import core.Contexts.* +import core.Decorators.* import core.DenotTransformers.IdentityDenotTransformer -import core.Flags._ -import core.NameKinds.{LazyBitMapName, LazyLocalInitName, LazyLocalName, ExpandedName} +import core.Flags.* +import core.NameKinds.{ExpandedName, LazyBitMapName, LazyLocalInitName, LazyLocalName} import core.StdNames.nme -import core.Symbols._ -import core.Types._ +import core.Symbols.* +import core.Types.* import core.{Names, StdNames} +import dotty.tools.dotc.config.Feature import transform.MegaPhase.MiniPhase -import transform.SymUtils._ +import transform.SymUtils.* + import scala.collection.mutable class LazyVals extends MiniPhase with IdentityDenotTransformer { import LazyVals._ import tpd._ - /** this map contains mutable state of transformation: OffsetDefs to be appended to companion object definitions, - * and number of bits currently used */ - class OffsetInfo(var defs: List[Tree], var ord:Int) + /** + * The map contains the list of the offset trees. 
+ */ + class OffsetInfo(var defs: List[Tree], var ord: Int = 0) + private val appendOffsetDefs = mutable.Map.empty[Symbol, OffsetInfo] override def phaseName: String = LazyVals.name @@ -52,6 +55,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { else nullables.toList } + private def needsBoxing(tp: Type)(using Context): Boolean = tp.classSymbol.isPrimitiveValueClass override def prepareForUnit(tree: Tree)(using Context): Context = { if (lazyValNullables == null) @@ -62,7 +66,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { override def transformDefDef(tree: DefDef)(using Context): Tree = transformLazyVal(tree) - override def transformValDef(tree: ValDef)(using Context): Tree = transformLazyVal(tree) @@ -103,19 +106,17 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { /** Append offset fields to companion objects - */ + */ override def transformTemplate(template: Template)(using Context): Tree = { val cls = ctx.owner.asClass - appendOffsetDefs.get(cls) match { case None => template case Some(data) => - data.defs.foreach(_.symbol.addAnnotation(Annotation(defn.ScalaStaticAnnot))) + data.defs.foreach(defin => defin.symbol.addAnnotation(Annotation(defn.ScalaStaticAnnot, defin.symbol.span))) cpy.Template(template)(body = addInFront(data.defs, template.body)) } } - private def addInFront(prefix: List[Tree], stats: List[Tree]) = stats match { case first :: rest if isSuperConstrCall(first) => first :: prefix ::: rest case _ => prefix ::: stats @@ -186,7 +187,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { Thicket(holderTree, initTree, accessor) } - override def transformStats(trees: List[tpd.Tree])(using Context): List[Tree] = { // backend requires field usage to be after field definition // need to bring containers to start of method @@ -274,6 +274,223 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } } + /** + * Create a threadsafe lazy accessor and function that computes 
the field's value. `Evaluating` and + * `NullValue` are represented by `object`s and `Waiting` by a class that allows awaiting the completion + * of the evaluation. Note that since tail-recursive functions are transformed *before* lazy-vals, + * this implementation does involve explicit while loop. `PatternMatcher` is coming before `LazyVals`, + * therefore the pattern matching is implemented using if-s. + * + * ``` + * private @volatile var _x: AnyRef = null + * + * def x: A = + * val result = _x + * if result.isInstanceOf[A] then + * result // possible unboxing applied here + * else if result.eq(NullValue) then + * null // possible unboxing applied here + * else + * x_compute() // possible unboxing applied here + * + * private def x_compute(): AnyRef = + * while do + * val current: AnyRef = _x + * if current.eq(null) then + * if CAS(_x, null, Evaluating) then + * var resultNullable: AnyRef = null + * var result: AnyRef = null + * try + * resultNullable = rhs + * nullable = null // nulls out the nullable fields used only in initialization + * if resultNullable.eq(null) then + * result = NullValue + * else + * result = resultNullable + * finally + * if !CAS(_x, Evaluating, result) then + * val lock = _x.asInstanceOf[Waiting] + * CAS(_x, lock, result) + * lock.release() + * return resultNullable + * else + * if current.isInstanceOf[LazyValControlState] then + * if current.eq(Evaluating) then // To avoid creating Waiting instance + * CAS(current, current, new Waiting) + * else if current.isInstanceOf[Waiting] then + * current.asInstanceOf[Waiting].await() + * else return null + * else + * return current + * end while + * * ``` + * + * @param memberDef the transformed lazy field member definition + * @param claz the class containing this lazy val field + * @param target the target synthetic field + * @param offset the offset of the field in the storage allocation of the class + * @param thiz a reference to the transformed class + */ + def mkThreadSafeDef(memberDef: 
ValOrDefDef, + claz: ClassSymbol, + target: Symbol, + offset: Tree, + thiz: Tree)(using Context): (DefDef, DefDef) = { + val tp = memberDef.tpe.widenDealias.resultType.widenDealias + val waiting = ref(defn.LazyValsWaitingState) + val controlState = ref(defn.LazyValsControlState) + val evaluating = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.evaluating) + val nullValue = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.nullValue) + val objCasFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.objCas) + val accessorMethodSymbol = memberDef.symbol.asTerm + val lazyInitMethodName = LazyLocalInitName.fresh(memberDef.name.asTermName) + val lazyInitMethodSymbol = newSymbol(claz, lazyInitMethodName, Synthetic | Method | Private, MethodType(Nil)(_ => Nil, _ => defn.ObjectType)) + + val rhs = memberDef.rhs + val rhsMappedOwner = rhs.changeOwnerAfter(memberDef.symbol, lazyInitMethodSymbol, this) + val valueSymbol = newSymbol(accessorMethodSymbol, lazyNme.result, Synthetic, defn.ObjectType) + + val immediateValueCondition = + if (defn.LazyValsControlState.isSubClass(tp.classSymbol)) then + ref(valueSymbol).select(defn.Any_!=).appliedTo(nullLiteral).select(nme.And).appliedTo(ref(valueSymbol) + .select(defn.Any_isInstanceOf).appliedToType(defn.LazyValsControlState.typeRef) + .select(nme.UNARY_!).appliedToNone) + else + ref(valueSymbol).select(defn.Any_isInstanceOf).appliedToType(tp) + + val accessorBody = + Block( + ValDef(valueSymbol, ref(target)) :: Nil, + If( // if _x != null && !_x.isInstanceOf[LazyValControlState] then + immediateValueCondition, + ref(valueSymbol).ensureConforms(tp), // then return _x.asInstanceOf[A] + If( + ref(valueSymbol).select(defn.Object_eq).appliedTo(nullValue), + nullLiteral.ensureConforms(tp), + ref(lazyInitMethodSymbol).ensureApplied.ensureConforms(tp) // else return x_compute() + ) + ) + ) + + val accessorDef = DefDef(accessorMethodSymbol, accessorBody) + + // if observed a null (uninitialized) value + val initialize = { + // var 
result: AnyRef + val resSymbNullable = newSymbol(lazyInitMethodSymbol, lazyNme.resultNullable, Synthetic | Mutable, defn.ObjectType) + val resSymb = newSymbol(lazyInitMethodSymbol, lazyNme.result, Synthetic | Mutable, defn.ObjectType) + // releasing block in finally + val lockRel = { + val lockSymb = newSymbol(lazyInitMethodSymbol, lazyNme.lock, Synthetic, waiting.typeOpt) + Block(ValDef(lockSymb, ref(target).cast(waiting.typeOpt)) + :: objCasFlag.appliedTo(thiz, offset, ref(lockSymb), ref(resSymb)) :: Nil, + ref(lockSymb).select(lazyNme.RLazyVals.waitingRelease).ensureApplied) + } + // finally block + val fin = If( + objCasFlag.appliedTo(thiz, offset, evaluating, ref(resSymb)).select(nme.UNARY_!).appliedToNone, + lockRel, + unitLiteral + ) + // entire try block + val evaluate = Try( + + Block( + (Assign(ref(resSymbNullable), if needsBoxing(tp) && rhsMappedOwner != EmptyTree then rhsMappedOwner.ensureConforms(defn.boxedType(tp)) else rhsMappedOwner) // try result = rhs + :: If( + ref(resSymbNullable).select(defn.Object_eq).appliedTo(nullLiteral), + Assign(ref(resSymb), nullValue), + Assign(ref(resSymb), ref(resSymbNullable)) + ) :: Nil) + ::: nullOut(nullableFor(accessorMethodSymbol)), + unitLiteral), + Nil, + fin + ) + // if CAS(_, null, Evaluating) + If( + objCasFlag.appliedTo(thiz, offset, nullLiteral, evaluating), + Block(ValDef(resSymb, nullLiteral) :: ValDef(resSymbNullable, nullLiteral) :: evaluate :: Nil, // var result: AnyRef = null + Return(ref(resSymbNullable), lazyInitMethodSymbol)), + unitLiteral + ).withType(defn.UnitType) + } + + val current = newSymbol(lazyInitMethodSymbol, lazyNme.current, Synthetic, defn.ObjectType) + val ifNotUninitialized = + If( + ref(current).select(defn.Any_isInstanceOf).appliedToTypeTree(controlState), + // if a control state + If( + ref(current).select(defn.Object_eq).appliedTo(evaluating), + // if is Evaluating then CAS(_, Evaluating, new Waiting) + Block( + objCasFlag.appliedTo(thiz, offset, ref(current), 
Select(New(waiting), StdNames.nme.CONSTRUCTOR).ensureApplied) :: Nil, + unitLiteral + ), + // if not Evaluating + If( + ref(current).select(defn.Any_isInstanceOf).appliedToTypeTree(waiting), + // if is waiting + ref(current).select(defn.Any_asInstanceOf).appliedToTypeTree(waiting).select(lazyNme.RLazyVals.waitingAwaitRelease, _.info.paramInfoss.exists(_.size == 0)).ensureApplied, + Return(nullLiteral, lazyInitMethodSymbol) + ) + ), + // if not a control state + Return(ref(current), lazyInitMethodSymbol) + ) + + val initBody = Block(ValDef(current, ref(target)) :: Nil, If(ref(current).select(defn.Object_eq).appliedTo(nullLiteral), initialize, ifNotUninitialized).withType(defn.UnitType)) + val initMainLoop = WhileDo(EmptyTree, initBody) // becomes: while (true) do { body } + val initMethodDef = DefDef(lazyInitMethodSymbol, initMainLoop) + (accessorDef, initMethodDef) + } + + def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { + assert(!(x.symbol is Mutable)) + if ctx.settings.YlegacyLazyVals.value then + transformMemberDefThreadSafeLegacy(x) + else + transformMemberDefThreadSafeNew(x) + } + + def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = { + import dotty.tools.dotc.core.Types._ + import dotty.tools.dotc.core.Flags._ + + val claz = x.symbol.owner.asClass + val thizClass = Literal(Constant(claz.info)) + + def offsetName(id: Int) = s"${StdNames.nme.LAZY_FIELD_OFFSET}${if (x.symbol.owner.is(Module)) "_m_" else ""}$id".toTermName + val containerName = LazyLocalName.fresh(x.name.asTermName) + val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags | Private, defn.ObjectType, coord = x.symbol.coord).enteredAfter(this) + containerSymbol.addAnnotation(Annotation(defn.VolatileAnnot, containerSymbol.span)) // private @volatile var _x: AnyRef + containerSymbol.addAnnotations(x.symbol.annotations) // pass annotations from original definition + 
containerSymbol.removeAnnotation(defn.ScalaStaticAnnot) + val getOffset = + Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) + val containerTree = ValDef(containerSymbol, nullLiteral) + + // create an offset for this lazy val + val offsetSymbol: TermSymbol = appendOffsetDefs.get(claz) match + case Some(info) => + newSymbol(claz, offsetName(info.defs.size), Synthetic, defn.LongType).enteredAfter(this) + case None => + newSymbol(claz, offsetName(0), Synthetic, defn.LongType).enteredAfter(this) + offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot, offsetSymbol.nn.span)) + val fieldTree = thizClass.select(lazyNme.RLazyVals.getDeclaredField).appliedTo(Literal(Constant(containerName.mangledString))) + val offsetTree = ValDef(offsetSymbol.nn, getOffset.appliedTo(fieldTree)) + val offsetInfo = appendOffsetDefs.getOrElseUpdate(claz, new OffsetInfo(Nil)) + offsetInfo.defs = offsetTree :: offsetInfo.defs + val offset = ref(offsetSymbol.nn) + + val swapOver = + This(claz) + + val (accessorDef, initMethodDef) = mkThreadSafeDef(x, claz, containerSymbol, offset, swapOver) + Thicket(containerTree, accessorDef, initMethodDef) + } + /** Create a threadsafe lazy accessor equivalent to such code * ``` * def methodSymbol(): Int = { @@ -305,7 +522,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * } * ``` */ - def mkThreadSafeDef(methodSymbol: TermSymbol, + def mkThreadSafeDefLegacy(methodSymbol: TermSymbol, claz: ClassSymbol, ord: Int, target: Symbol, @@ -374,15 +591,12 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { DefDef(methodSymbol, loop) } - def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { - assert(!(x.symbol is Mutable)) - + def transformMemberDefThreadSafeLegacy(x: ValOrDefDef)(using Context): Thicket = { val tpe = x.tpe.widen.resultType.widen val claz = x.symbol.owner.asClass val thizClass = Literal(Constant(claz.info)) - val helperModule = 
requiredModule("scala.runtime.LazyVals") - val getOffset = Select(ref(helperModule), lazyNme.RLazyVals.getOffset) - val getOffsetStatic = Select(ref(helperModule), lazyNme.RLazyVals.getOffsetStatic) + val getOffset = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffset) + val getOffsetStatic = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) var offsetSymbol: TermSymbol | Null = null var flag: Tree = EmptyTree var ord = 0 @@ -403,7 +617,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { .symbol.asTerm else { // need to create a new flag offsetSymbol = newSymbol(claz, offsetById, Synthetic, defn.LongType).enteredAfter(this) - offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot, offsetSymbol.nn.span)) val flagName = LazyBitMapName.fresh(id.toString.toTermName) val flagSymbol = newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this) flag = ValDef(flagSymbol, Literal(Constant(0L))) @@ -414,7 +628,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { case None => offsetSymbol = newSymbol(claz, offsetName(0), Synthetic, defn.LongType).enteredAfter(this) - offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot, offsetSymbol.nn.span)) val flagName = LazyBitMapName.fresh("0".toTermName) val flagSymbol = newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this) flag = ValDef(flagSymbol, Literal(Constant(0L))) @@ -425,17 +639,16 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val containerName = LazyLocalName.fresh(x.name.asTermName) val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags, tpe, coord = x.symbol.coord).enteredAfter(this) - val containerTree = ValDef(containerSymbol, defaultValue(tpe)) val offset = ref(offsetSymbol.nn) - val getFlag = 
Select(ref(helperModule), lazyNme.RLazyVals.get) - val setFlag = Select(ref(helperModule), lazyNme.RLazyVals.setFlag) - val wait = Select(ref(helperModule), lazyNme.RLazyVals.wait4Notification) - val state = Select(ref(helperModule), lazyNme.RLazyVals.state) - val cas = Select(ref(helperModule), lazyNme.RLazyVals.cas) + val getFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.get) + val setFlag = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.setFlag) + val wait = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.wait4Notification) + val state = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.state) + val cas = Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.cas) - val accessor = mkThreadSafeDef(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) + val accessor = mkThreadSafeDefLegacy(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait) if (flag eq EmptyTree) Thicket(containerTree, accessor) else Thicket(containerTree, flag, accessor) @@ -445,26 +658,34 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { object LazyVals { val name: String = "lazyVals" val description: String = "expand lazy vals" - object lazyNme { import Names.TermName object RLazyVals { import scala.runtime.LazyVals.{Names => N} - val get: TermName = N.get.toTermName - val setFlag: TermName = N.setFlag.toTermName - val wait4Notification: TermName = N.wait4Notification.toTermName - val state: TermName = N.state.toTermName - val cas: TermName = N.cas.toTermName - val getOffset: TermName = N.getOffset.toTermName - val getOffsetStatic: TermName = "getOffsetStatic".toTermName - val getDeclaredField: TermName = "getDeclaredField".toTermName + val waitingAwaitRelease: TermName = "await".toTermName + val waitingRelease: TermName = "countDown".toTermName + val evaluating: TermName = "Evaluating".toTermName + val nullValue: TermName = "NullValue".toTermName + val objCas: 
TermName = "objCAS".toTermName + val get: TermName = N.get.toTermName + val setFlag: TermName = N.setFlag.toTermName + val wait4Notification: TermName = N.wait4Notification.toTermName + val state: TermName = N.state.toTermName + val cas: TermName = N.cas.toTermName + val getOffset: TermName = N.getOffset.toTermName + val getOffsetStatic: TermName = "getOffsetStatic".toTermName + val getDeclaredField: TermName = "getDeclaredField".toTermName } val flag: TermName = "flag".toTermName val state: TermName = "state".toTermName val result: TermName = "result".toTermName + val resultNullable: TermName = "resultNullable".toTermName val value: TermName = "value".toTermName val initialized: TermName = "initialized".toTermName val initialize: TermName = "initialize".toTermName val retry: TermName = "retry".toTermName + val current: TermName = "current".toTermName + val lock: TermName = "lock".toTermName + val discard: TermName = "discard".toTermName } } diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala new file mode 100644 index 000000000000..cc2e6118d1fa --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -0,0 +1,142 @@ +package dotty.tools.dotc +package transform + +import scala.language.unsafeNulls + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.Trees.* +import dotty.tools.dotc.config.Printers.{macroAnnot => debug} +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.DenotTransformers.DenotTransformer +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.MacroClassLoader +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.quoted.* +import dotty.tools.dotc.util.SrcPos +import scala.quoted.runtime.impl.{QuotesImpl, SpliceScope} + +import scala.quoted.Quotes +import 
scala.util.control.NonFatal + +import java.lang.reflect.InvocationTargetException + +class MacroAnnotations: + import tpd.* + import MacroAnnotations.* + + /** Expands every macro annotation that is on this tree. + * Returns a list with transformed definition and any added definitions. + */ + def expandAnnotations(tree: MemberDef)(using Context): List[DefTree] = + if !hasMacroAnnotation(tree.symbol) then + List(tree) + else if tree.symbol.is(Module) && !tree.symbol.isClass then + // only class is transformed + List(tree) + else if tree.symbol.isType && !tree.symbol.isClass then + report.error("macro annotations are not supported on type", tree) + List(tree) + else + debug.println(i"Expanding macro annotations of:\n$tree") + + val macroInterpreter = new Interpreter(tree.srcPos, MacroClassLoader.fromContext) + + val allTrees = List.newBuilder[DefTree] + var insertedAfter: List[List[DefTree]] = Nil + + // Apply all macro annotation to `tree` and collect new definitions in order + val transformedTree: DefTree = tree.symbol.annotations.foldLeft(tree) { (tree, annot) => + if isMacroAnnotation(annot) then + debug.println(i"Expanding macro annotation: ${annot}") + + // Interpret call to `new myAnnot(..).transform(using )()` + val transformedTrees = + try callMacro(macroInterpreter, tree, annot) + catch + // TODO: Replace this case when scala.annaotaion.MacroAnnotation is no longer experimental and reflectiveSelectable is not used + // Replace this case with the nested cases. + case ex0: InvocationTargetException => + ex0.getCause match + case ex: scala.quoted.runtime.StopMacroExpansion => + if !ctx.reporter.hasErrors then + report.error("Macro expansion was aborted by the macro without any errors reported. 
Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", annot.tree) + List(tree) + case Interpreter.MissingClassDefinedInCurrentRun(sym) => + Interpreter.suspendOnMissing(sym, annot.tree) + case NonFatal(ex) => + val stack0 = ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.MacroAnnotations") + val stack = stack0.take(1 + stack0.lastIndexWhere(_.getMethodName == "transform")) + val msg = + em"""Failed to evaluate macro. + | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage} + | ${stack.mkString("\n ")} + |""" + report.error(msg, annot.tree) + List(tree) + case _ => + throw ex0 + transformedTrees.span(_.symbol != tree.symbol) match + case (prefixed, newTree :: suffixed) => + allTrees ++= prefixed + insertedAfter = suffixed :: insertedAfter + prefixed.foreach(checkMacroDef(_, tree, annot)) + suffixed.foreach(checkMacroDef(_, tree, annot)) + transform.TreeChecker.checkMacroGeneratedTree(tree, newTree) + newTree + case (Nil, Nil) => + report.error(i"Unexpected `Nil` returned by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) + tree + case (_, Nil) => + report.error(i"Transformed tree for ${tree} was not return by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) + tree + else + tree + } + + allTrees += transformedTree + insertedAfter.foreach(allTrees.++=) + + val result = allTrees.result() + debug.println(result.map(_.show).mkString("expanded to:\n", "\n", "")) + result + + /** Interpret the code `new annot(..).transform(using )()` */ + private def callMacro(interpreter: Interpreter, tree: MemberDef, annot: Annotation)(using Context): List[MemberDef] = + // TODO: Remove when scala.annaotaion.MacroAnnotation is no longer experimental + import scala.reflect.Selectable.reflectiveSelectable + type MacroAnnotation = { + def transform(using Quotes)(tree: Object/*Erased type of quotes.refelct.Definition*/): List[MemberDef 
/*quotes.refelct.Definition known to be MemberDef in QuotesImpl*/] + } + + // Interpret macro annotation instantiation `new myAnnot(..)` + val annotInstance = interpreter.interpret[MacroAnnotation](annot.tree).get + // TODO: Remove when scala.annaotaion.MacroAnnotation is no longer experimental + assert(annotInstance.getClass.getClassLoader.loadClass("scala.annotation.MacroAnnotation").isInstance(annotInstance)) + + val quotes = QuotesImpl()(using SpliceScope.contextWithNewSpliceScope(tree.symbol.sourcePos)(using MacroExpansion.context(tree)).withOwner(tree.symbol.owner)) + annotInstance.transform(using quotes)(tree.asInstanceOf[quotes.reflect.Definition]) + + /** Check that this tree can be added by the macro annotation */ + private def checkMacroDef(newTree: DefTree, annotatedTree: Tree, annot: Annotation)(using Context) = + transform.TreeChecker.checkMacroGeneratedTree(annotatedTree, newTree) + val sym = newTree.symbol + val annotated = annotatedTree.symbol + if sym.isType && !sym.isClass then + report.error(i"macro annotation cannot return a `type`. $annot tried to add $sym", annot.tree) + else if sym.owner != annotated.owner && !(annotated.owner.isPackageObject && (sym.isClass || sym.is(Module)) && sym.owner == annotated.owner.owner) then + report.error(i"macro annotation $annot added $sym with an inconsistent owner. Expected it to be owned by ${annotated.owner} but was owned by ${sym.owner}.", annot.tree) + else if annotated.isClass && annotated.owner.is(Package) /*&& !sym.isClass*/ then + report.error(i"macro annotation can not add top-level ${sym.showKind}. 
$annot tried to add $sym.", annot.tree) + +object MacroAnnotations: + + /** Is this an annotation that implements `scala.annation.MacroAnnotation` */ + def isMacroAnnotation(annot: Annotation)(using Context): Boolean = + annot.tree.symbol.maybeOwner.derivesFrom(defn.MacroAnnotationClass) + + /** Is this symbol annotated with an annotation that implements `scala.annation.MacroAnnotation` */ + def hasMacroAnnotation(sym: Symbol)(using Context): Boolean = + sym.getAnnotation(defn.MacroAnnotationClass).isDefined diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 27ccd622bc65..7bb7ed365ebe 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -38,10 +38,10 @@ abstract class MacroTransform extends Phase { tree case _: PackageDef | _: MemberDef => super.transform(tree)(using localCtx(tree)) - case impl @ Template(constr, parents, self, _) => + case impl @ Template(constr, _, self, _) => cpy.Template(tree)( transformSub(constr), - transform(parents)(using ctx.superCallContext), + transform(impl.parents)(using ctx.superCallContext), Nil, transformSelf(self), transformStats(impl.body, tree.symbol)) diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index 9d241216bdaa..d4dd911241d3 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -456,7 +456,7 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { } def transformTrees(trees: List[Tree], start: Int)(using Context): List[Tree] = - trees.mapInline(transformTree(_, start)) + trees.flattenedMapConserve(transformTree(_, start)) def transformSpecificTrees[T <: Tree](trees: List[T], start: Int)(using Context): List[T] = transformTrees(trees, start).asInstanceOf[List[T]] diff --git 
a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala index d20f3e1a8da4..1392d00011a2 100644 --- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala +++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala @@ -4,7 +4,7 @@ package transform import core._ import DenotTransformers._ import Contexts._ -import Phases.phaseOf +import Phases.* import SymDenotations.SymDenotation import Denotations._ import Symbols._ @@ -16,8 +16,12 @@ import Flags._ import Decorators._ import StdNames.nme +import sjs.JSSymUtils._ + import util.Store +import dotty.tools.backend.sjs.JSDefinitions.jsdefn + object Memoize { val name: String = "memoize" val description: String = "add private fields to getters and setters" @@ -110,26 +114,10 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => flags = Private | (if (sym.is(StableRealizable)) EmptyFlags else Mutable), info = fieldType, coord = tree.span - ).withAnnotationsCarrying(sym, defn.FieldMetaAnnot) + ).withAnnotationsCarrying(sym, defn.FieldMetaAnnot, orNoneOf = defn.MetaAnnots) .enteredAfter(thisPhase) } - def addAnnotations(denot: Denotation): Unit = - denot match { - case fieldDenot: SymDenotation if sym.annotations.nonEmpty => - val cpy = fieldDenot.copySymDenotation() - cpy.annotations = sym.annotations - cpy.installAfter(thisPhase) - case _ => () - } - - def removeUnwantedAnnotations(denot: SymDenotation, metaAnnotSym: ClassSymbol): Unit = - if (sym.annotations.nonEmpty) { - val cpy = sym.copySymDenotation() - cpy.filterAnnotations(_.symbol.hasAnnotation(metaAnnotSym)) - cpy.installAfter(thisPhase) - } - val NoFieldNeeded = Lazy | Deferred | JavaDefined | Inline def erasedBottomTree(sym: Symbol) = @@ -142,14 +130,30 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => } if sym.is(Accessor, butNot = NoFieldNeeded) then + /* Tests whether the semantics of Scala.js require a field for this symbol, irrespective of any 
+ * optimization we think we can do. This is the case if one of the following is true: + * - it is a member of a JS type, since it needs to be visible as a JavaScript field + * - is is exported as static member of the companion class, since it needs to be visible as a JavaScript static field + * - it is exported to the top-level, since that can only be done as a true top-level variable, i.e., a field + */ + def sjsNeedsField: Boolean = + ctx.settings.scalajs.value && ( + sym.owner.isJSType + || sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot) + || sym.hasAnnotation(jsdefn.JSExportStaticAnnot) + ) + def adaptToField(field: Symbol, tree: Tree): Tree = if (tree.isEmpty) tree else tree.ensureConforms(field.info.widen) def isErasableBottomField(field: Symbol, cls: Symbol): Boolean = - !field.isVolatile && ((cls eq defn.NothingClass) || (cls eq defn.NullClass) || (cls eq defn.BoxedUnitClass)) + !field.isVolatile + && ((cls eq defn.NothingClass) || (cls eq defn.NullClass) || (cls eq defn.BoxedUnitClass)) + && !sjsNeedsField if sym.isGetter then - val constantFinalVal = sym.isAllOf(Accessor | Final, butNot = Mutable) && tree.rhs.isInstanceOf[Literal] + val constantFinalVal = + sym.isAllOf(Accessor | Final, butNot = Mutable) && tree.rhs.isInstanceOf[Literal] && !sjsNeedsField if constantFinalVal then // constant final vals do not need to be transformed at all, and do not need a field tree @@ -163,8 +167,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => if isErasableBottomField(field, rhsClass) then erasedBottomTree(rhsClass) else transformFollowingDeep(ref(field))(using ctx.withOwner(sym)) val getterDef = cpy.DefDef(tree)(rhs = getterRhs) - addAnnotations(fieldDef.denot) - removeUnwantedAnnotations(sym, defn.GetterMetaAnnot) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot)) Thicket(fieldDef, getterDef) else if sym.isSetter then if (!sym.is(ParamAccessor)) { val Literal(Constant(())) = tree.rhs: @unchecked } // This is 
intended as an assertion @@ -190,7 +193,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => then Literal(Constant(())) else Assign(ref(field), adaptToField(field, ref(tree.termParamss.head.head.symbol))) val setterDef = cpy.DefDef(tree)(rhs = transformFollowingDeep(initializer)(using ctx.withOwner(sym))) - removeUnwantedAnnotations(sym, defn.SetterMetaAnnot) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) setterDef else // Curiously, some accessors from Scala2 have ' ' suffixes. diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala index 99702686edf8..db96aeefe231 100644 --- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala +++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala @@ -46,7 +46,7 @@ class MoveStatics extends MiniPhase with SymTransformer { if (staticFields.nonEmpty) { /* do NOT put Flags.JavaStatic here. It breaks .enclosingClass */ val staticCostructor = newSymbol(orig.symbol, nme.STATIC_CONSTRUCTOR, Flags.Synthetic | Flags.Method | Flags.Private, MethodType(Nil, defn.UnitType)) - staticCostructor.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + staticCostructor.addAnnotation(Annotation(defn.ScalaStaticAnnot, staticCostructor.span)) staticCostructor.entered val staticAssigns = staticFields.map(x => Assign(ref(x.symbol), x.rhs.changeOwner(x.symbol, staticCostructor))) diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala index 7e1ae9e661f6..a75d6da9dd6a 100644 --- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala +++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala @@ -6,6 +6,7 @@ import Contexts._, Symbols._, Types._, Flags._, StdNames._ import MegaPhase._ import NameKinds.NonLocalReturnKeyName import config.SourceVersion.* +import Decorators.em object NonLocalReturns { import ast.tpd._ @@ 
-96,7 +97,7 @@ class NonLocalReturns extends MiniPhase { override def transformReturn(tree: Return)(using Context): Tree = if isNonLocalReturn(tree) then report.gradualErrorOrMigrationWarning( - "Non local returns are no longer supported; use scala.util.control.NonLocalReturns instead", + em"Non local returns are no longer supported; use `boundary` and `boundary.break` in `scala.util` instead", tree.srcPos, warnFrom = `3.2`, errorFrom = future) diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index b27a75436d86..48dc7c818360 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -200,10 +200,13 @@ object OverridingPairs: /** Let `member` and `other` be members of some common class C with types * `memberTp` and `otherTp` in C. Are the two symbols considered an overriding * pair in C? We assume that names already match so we test only the types here. - * @param fallBack A function called if the initial test is false and - * `member` and `other` are term symbols. + * @param fallBack A function called if the initial test is false and + * `member` and `other` are term symbols. + * @param isSubType A function to be used for checking subtype relationships + * between term fields. 
*/ - def isOverridingPair(member: Symbol, memberTp: Type, other: Symbol, otherTp: Type, fallBack: => Boolean = false)(using Context): Boolean = + def isOverridingPair(member: Symbol, memberTp: Type, other: Symbol, otherTp: Type, fallBack: => Boolean = false, + isSubType: (Type, Type) => Context ?=> Boolean = (tp1, tp2) => tp1 frozen_<:< tp2)(using Context): Boolean = if member.isType then // intersection of bounds to refined types must be nonempty memberTp.bounds.hi.hasSameKindAs(otherTp.bounds.hi) && ( @@ -222,6 +225,6 @@ object OverridingPairs: val relaxedOverriding = ctx.explicitNulls && (member.is(JavaDefined) || other.is(JavaDefined)) member.name.is(DefaultGetterName) // default getters are not checked for compatibility || memberTp.overrides(otherTp, relaxedOverriding, - member.matchNullaryLoosely || other.matchNullaryLoosely || fallBack) + member.matchNullaryLoosely || other.matchNullaryLoosely || fallBack, isSubType = isSubType) end OverridingPairs diff --git a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala index 94ea48e14efd..8c93ffb90232 100644 --- a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala +++ b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala @@ -30,7 +30,8 @@ import NameKinds.ParamAccessorName * The aim of this transformation is to avoid redundant parameter accessor fields. 
*/ class ParamForwarding extends MiniPhase with IdentityDenotTransformer: - import ast.tpd._ + import ast.tpd.* + import ParamForwarding.inheritedAccessor private def thisPhase: ParamForwarding = this @@ -39,20 +40,6 @@ class ParamForwarding extends MiniPhase with IdentityDenotTransformer: override def description: String = ParamForwarding.description def transformIfParamAlias(mdef: ValOrDefDef)(using Context): Tree = - - def inheritedAccessor(sym: Symbol)(using Context): Symbol = - val candidate = sym.owner.asClass.superClass - .info.decl(sym.name).suchThat(_.is(ParamAccessor, butNot = Mutable)) - .symbol - if !candidate.is(Private) // candidate might be private and accessible if it is in an outer class - && candidate.isAccessibleFrom(currentClass.thisType, superAccess = true) - then - candidate - else if candidate.is(SuperParamAlias) then - inheritedAccessor(candidate) - else - NoSymbol - val sym = mdef.symbol.asTerm if sym.is(SuperParamAlias) then assert(sym.is(ParamAccessor, butNot = Mutable)) @@ -84,3 +71,17 @@ class ParamForwarding extends MiniPhase with IdentityDenotTransformer: object ParamForwarding: val name: String = "paramForwarding" val description: String = "add forwarders for aliases of superclass parameters" + + def inheritedAccessor(sym: Symbol)(using Context): Symbol = + val candidate = sym.owner.asClass.superClass + .info.decl(sym.name).suchThat(_.is(ParamAccessor, butNot = Mutable)) + .symbol + if !candidate.is(Private) // candidate might be private and accessible if it is in an outer class + && candidate.isAccessibleFrom(currentClass.thisType, superAccess = true) + then + candidate + else if candidate.is(SuperParamAlias) then + inheritedAccessor(candidate) + else + NoSymbol +end ParamForwarding \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 70fa0e5cc513..ac1e1868f26e 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -2,23 +2,26 @@ package dotty.tools package dotc package transform -import scala.annotation.tailrec import core._ import MegaPhase._ -import collection.mutable import Symbols._, Contexts._, Types._, StdNames._, NameOps._ +import patmat.SpaceEngine import util.Spans._ import typer.Applications.* import SymUtils._ import TypeUtils.* +import Annotations.* import Flags._, Constants._ import Decorators._ import NameKinds.{PatMatStdBinderName, PatMatAltsName, PatMatResultName} import config.Printers.patmatch import reporting._ -import dotty.tools.dotc.ast._ +import ast._ import util.Property._ +import scala.annotation.tailrec +import scala.collection.mutable + /** The pattern matching transform. * After this phase, the only Match nodes remaining in the code are simple switches * where every pattern is an integer or string constant @@ -45,9 +48,8 @@ class PatternMatcher extends MiniPhase { val translated = new Translator(matchType, this).translateMatch(tree) // check exhaustivity and unreachability - val engine = new patmat.SpaceEngine - engine.checkExhaustivity(tree) - engine.checkRedundancy(tree) + SpaceEngine.checkExhaustivity(tree) + SpaceEngine.checkRedundancy(tree) translated.ensureConforms(matchType) } @@ -664,12 +666,12 @@ object PatternMatcher { val refCount = varRefCount(plan) val LetPlan(topSym, _) = plan: @unchecked - def toDrop(sym: Symbol) = initializer.get(sym) match { - case Some(rhs) => + def toDrop(sym: Symbol) = + val rhs = initializer.lookup(sym) + if rhs != null then isPatmatGenerated(sym) && refCount(sym) <= 1 && sym != topSym && isPureExpr(rhs) - case none => + else false - } object Inliner extends PlanTransform { override val treeMap = new TreeMap { @@ -707,9 +709,9 @@ object PatternMatcher { // ----- Generating trees from plans --------------- /** The condition a test plan rewrites to */ - private def 
emitCondition(plan: TestPlan): Tree = { + private def emitCondition(plan: TestPlan): Tree = val scrutinee = plan.scrutinee - (plan.test: @unchecked) match { + (plan.test: @unchecked) match case NonEmptyTest => constToLiteral( scrutinee @@ -737,41 +739,49 @@ object PatternMatcher { case TypeTest(tpt, trusted) => val expectedTp = tpt.tpe - // An outer test is needed in a situation like `case x: y.Inner => ...` - def outerTestNeeded: Boolean = { - def go(expected: Type): Boolean = expected match { - case tref @ TypeRef(pre: SingletonType, _) => - tref.symbol.isClass && - ExplicitOuter.needsOuterIfReferenced(tref.symbol.asClass) - case AppliedType(tpe, _) => go(tpe) - case _ => - false - } - // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest` - // generates an outer test based on `patType.prefix` with automatically dealises. - go(expectedTp.dealias) - } + def typeTest(scrut: Tree, expected: Type): Tree = + val ttest = scrut.select(defn.Any_typeTest).appliedToType(expected) + if trusted then ttest.pushAttachment(TrustedTypeTestKey, ()) + ttest - def outerTest: Tree = thisPhase.transformFollowingDeep { - val expectedOuter = singleton(expectedTp.normalizedPrefix) - val expectedClass = expectedTp.dealias.classSymbol.asClass - ExplicitOuter.ensureOuterAccessors(expectedClass) - scrutinee.ensureConforms(expectedTp) - .outerSelect(1, expectedClass.owner.typeRef) - .select(defn.Object_eq) - .appliedTo(expectedOuter) - } + /** An outer test is needed in a situation like `case x: y.Inner => ... + * or like case x: O#Inner if the owner of Inner is not a subclass of O. + * Outer tests are added here instead of in TypeTestsCasts since they + * might cause outer accessors to be added to inner classes (via ensureOuterAccessors) + * and therefore have to run before ExplicitOuter. 
+ */ + def addOuterTest(tree: Tree, expected: Type): Tree = expected.dealias match + case tref @ TypeRef(pre, _) => + tref.symbol match + case expectedCls: ClassSymbol if ExplicitOuter.needsOuterIfReferenced(expectedCls) => + def selectOuter = + ExplicitOuter.ensureOuterAccessors(expectedCls) + scrutinee.ensureConforms(expected).outerSelect(1, expectedCls.owner.typeRef) + if pre.isSingleton then + val expectedOuter = singleton(pre) + tree.and(selectOuter.select(defn.Object_eq).appliedTo(expectedOuter)) + else if !expectedCls.isStatic + && expectedCls.owner.isType + && !expectedCls.owner.derivesFrom(pre.classSymbol) + then + val testPre = + if expected.hasAnnotation(defn.UncheckedAnnot) then + AnnotatedType(pre, Annotation(defn.UncheckedAnnot, tree.span)) + else pre + tree.and(typeTest(selectOuter, testPre)) + else tree + case _ => tree + case AppliedType(tycon, _) => + addOuterTest(tree, tycon) + case _ => + tree - expectedTp.dealias match { + expectedTp.dealias match case expectedTp: SingletonType => scrutinee.isInstance(expectedTp) // will be translated to an equality test case _ => - val typeTest = scrutinee.select(defn.Any_typeTest).appliedToType(expectedTp) - if (trusted) typeTest.pushAttachment(TrustedTypeTestKey, ()) - if (outerTestNeeded) typeTest.and(outerTest) else typeTest - } - } - } + addOuterTest(typeTest(scrutinee, expectedTp), expectedTp) + end emitCondition @tailrec private def canFallThrough(plan: Plan): Boolean = plan match { diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index f3ae6a377aab..62174c806f09 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -19,7 +19,6 @@ import scala.collection.mutable import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import 
dotty.tools.dotc.inlines.Inlines import scala.annotation.constructorOnly @@ -93,7 +92,7 @@ class PickleQuotes extends MacroTransform { case _ => override def run(using Context): Unit = - if (ctx.compilationUnit.needsStaging) super.run(using freshStagingContext) + if (ctx.compilationUnit.needsStaging) super.run protected def newTransformer(using Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = @@ -113,12 +112,10 @@ class PickleQuotes extends MacroTransform { case _ => val (contents, tptWithHoles) = makeHoles(tpt) PickleQuotes(quotes, tptWithHoles, contents, tpt.tpe, true) - case tree: DefDef if tree.symbol.is(Macro) => + case tree: DefDef if !tree.rhs.isEmpty && tree.symbol.isInlineMethod => // Shrink size of the tree. The methods have already been inlined. // TODO move to FirstTransform to trigger even without quotes cpy.DefDef(tree)(rhs = defaultValue(tree.rhs.tpe)) - case _: DefDef if tree.symbol.isInlineMethod => - tree case _ => super.transform(tree) } @@ -160,13 +157,18 @@ class PickleQuotes extends MacroTransform { override def apply(tp: Type): Type = tp match case tp: TypeRef if tp.typeSymbol.isTypeSplice => apply(tp.dealias) - case tp @ TypeRef(pre, _) if pre == NoPrefix || pre.termSymbol.isLocal => + case tp @ TypeRef(pre, _) if isLocalPath(pre) => val hiBound = tp.typeSymbol.info match case info: ClassInfo => info.parents.reduce(_ & _) case info => info.hiBound apply(hiBound) case tp => mapOver(tp) + + private def isLocalPath(tp: Type): Boolean = tp match + case NoPrefix => true + case tp: TermRef if !tp.symbol.is(Package) => isLocalPath(tp.prefix) + case tp => false } /** Remove references to local types that will not be defined in this quote */ @@ -317,14 +319,17 @@ object PickleQuotes { defn.QuotedExprClass.typeRef.appliedTo(defn.AnyType)), args => val cases = termSplices.map { case (splice, idx) => - val defn.FunctionOf(argTypes, defn.FunctionOf(quotesType :: _, _, _, _), _, _) = splice.tpe: 
@unchecked + val defn.FunctionOf(argTypes, defn.FunctionOf(quotesType :: _, _, _), _) = splice.tpe: @unchecked val rhs = { val spliceArgs = argTypes.zipWithIndex.map { (argType, i) => args(1).select(nme.apply).appliedTo(Literal(Constant(i))).asInstance(argType) } val Block(List(ddef: DefDef), _) = splice: @unchecked // TODO: beta reduce inner closure? Or wait until BetaReduce phase? - BetaReduce(ddef, spliceArgs).select(nme.apply).appliedTo(args(2).asInstance(quotesType)) + BetaReduce( + splice + .select(nme.apply).appliedToArgs(spliceArgs)) + .select(nme.apply).appliedTo(args(2).asInstance(quotesType)) } CaseDef(Literal(Constant(idx)), EmptyTree, rhs) } diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 4d9b42a36fe7..f5fe34bafc2f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import core._ @@ -11,10 +12,10 @@ import Periods._ import Phases._ import Symbols._ import Flags.Module -import reporting.{ThrowingReporter, Profile} +import reporting.{ThrowingReporter, Profile, Message} import collection.mutable -import scala.concurrent.{Future, Await, ExecutionContext} -import scala.concurrent.duration.Duration +import util.concurrent.{Executor, Future} +import compiletime.uninitialized object Pickler { val name: String = "pickler" @@ -47,7 +48,7 @@ class Pickler extends Phase { // Maps that keep a record if -Ytest-pickler is set. 
private val beforePickling = new mutable.HashMap[ClassSymbol, String] - private val picklers = new mutable.HashMap[ClassSymbol, TastyPickler] + private val pickledBytes = new mutable.HashMap[ClassSymbol, Array[Byte]] /** Drop any elements of this list that are linked module classes of other elements in the list */ private def dropCompanionModuleClasses(clss: List[ClassSymbol])(using Context): List[ClassSymbol] = { @@ -56,6 +57,24 @@ class Pickler extends Phase { clss.filterNot(companionModuleClasses.contains) } + /** Runs given functions with a scratch data block in a serialized fashion (i.e. + * inside a synchronized block). Scratch data is re-used between calls. + * Used to conserve on memory usage by avoiding to create scratch data for each + * pickled unit. + */ + object serialized: + val scratch = new ScratchData + def run(body: ScratchData => Array[Byte]): Array[Byte] = + synchronized { + scratch.reset() + body(scratch) + } + + private val executor = Executor[Array[Byte]]() + + private def useExecutor(using Context) = + Pickler.ParallelPickling && !ctx.settings.YtestPickler.value + override def run(using Context): Unit = { val unit = ctx.compilationUnit pickling.println(i"unpickling in run ${ctx.runId}") @@ -64,25 +83,30 @@ class Pickler extends Phase { cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree)) tree <- sliceTopLevel(unit.tpdTree, cls) do + if ctx.settings.YtestPickler.value then beforePickling(cls) = tree.show + val pickler = new TastyPickler(cls) - if ctx.settings.YtestPickler.value then - beforePickling(cls) = tree.show - picklers(cls) = pickler val treePkl = new TreePickler(pickler) treePkl.pickle(tree :: Nil) Profile.current.recordTasty(treePkl.buf.length) - val positionWarnings = new mutable.ListBuffer[String]() - val pickledF = inContext(ctx.fresh) { - Future { - treePkl.compactify() + + val positionWarnings = new mutable.ListBuffer[Message]() + def reportPositionWarnings() = positionWarnings.foreach(report.warning(_)) + + def 
computePickled(): Array[Byte] = inContext(ctx.fresh) { + serialized.run { scratch => + treePkl.compactify(scratch) if tree.span.exists then val reference = ctx.settings.sourceroot.value - new PositionPickler(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference) - .picklePositions(unit.source, tree :: Nil, positionWarnings) + PositionPickler.picklePositions( + pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference, + unit.source, tree :: Nil, positionWarnings, + scratch.positionBuffer, scratch.pickledIndices) if !ctx.settings.YdropComments.value then - new CommentPickler(pickler, treePkl.buf.addrOfTree, treePkl.docString) - .pickleComment(tree) + CommentPickler.pickleComments( + pickler, treePkl.buf.addrOfTree, treePkl.docString, tree, + scratch.commentBuffer) val pickled = pickler.assembleParts() @@ -93,26 +117,40 @@ class Pickler extends Phase { // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG if pickling ne noPrinter then - pickling.synchronized { - println(i"**** pickled info of $cls") - println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) - } + println(i"**** pickled info of $cls") + println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) pickled - }(using ExecutionContext.global) + } } - def force(): Array[Byte] = - val result = Await.result(pickledF, Duration.Inf) - positionWarnings.foreach(report.warning(_)) - result - - if !Pickler.ParallelPickling || ctx.settings.YtestPickler.value then force() - unit.pickled += (cls -> force) + /** A function that returns the pickled bytes. Depending on `Pickler.ParallelPickling` + * either computes the pickled data in a future or eagerly before constructing the + * function value. 
+ */ + val demandPickled: () => Array[Byte] = + if useExecutor then + val futurePickled = executor.schedule(computePickled) + () => + try futurePickled.force.get + finally reportPositionWarnings() + else + val pickled = computePickled() + reportPositionWarnings() + if ctx.settings.YtestPickler.value then pickledBytes(cls) = pickled + () => pickled + + unit.pickled += (cls -> demandPickled) end for } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - val result = super.runOn(units) + val result = + if useExecutor then + executor.start() + try super.runOn(units) + finally executor.close() + else + super.runOn(units) if ctx.settings.YtestPickler.value then val ctx2 = ctx.fresh.setSetting(ctx.settings.YreadComments, true) testUnpickler( @@ -128,8 +166,8 @@ class Pickler extends Phase { pickling.println(i"testing unpickler at run ${ctx.runId}") ctx.initialize() val unpicklers = - for ((cls, pickler) <- picklers) yield { - val unpickler = new DottyUnpickler(pickler.assembleParts()) + for ((cls, bytes) <- pickledBytes) yield { + val unpickler = new DottyUnpickler(bytes) unpickler.enter(roots = Set.empty) cls -> unpickler } @@ -147,8 +185,9 @@ class Pickler extends Phase { if unequal then output("before-pickling.txt", previous) output("after-pickling.txt", unpickled) - report.error(s"""pickling difference for $cls in ${cls.source}, for details: - | - | diff before-pickling.txt after-pickling.txt""".stripMargin) + //sys.process.Process("diff -u before-pickling.txt after-pickling.txt").! 
+ report.error(em"""pickling difference for $cls in ${cls.source}, for details: + | + | diff before-pickling.txt after-pickling.txt""") end testSame } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 3db751df4145..7f3e47c14732 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import dotty.tools.dotc.ast.{Trees, tpd, untpd, desugar} @@ -14,6 +15,7 @@ import Decorators._ import Symbols._, SymUtils._, NameOps._ import ContextFunctionResults.annotateContextResults import config.Printers.typr +import config.Feature import util.SrcPos import reporting._ import NameKinds.WildcardParamName @@ -155,12 +157,20 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase checkInferredWellFormed(tree.tpt) if sym.is(Method) then if sym.isSetter then - removeUnwantedAnnotations(sym, defn.SetterMetaAnnot, NoSymbol, keepIfNoRelevantAnnot = false) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) + if sym.isOneOf(GivenOrImplicit) then + val cls = sym.info.finalResultType.classSymbol + if cls.isOneOf(GivenOrImplicit) then + sym.updateAnnotationsAfter(thisPhase, + atPhase(thisPhase)(cls.annotationsCarrying(Set(defn.CompanionMethodMetaAnnot))) + ++ sym.annotations) else if sym.is(Param) then - removeUnwantedAnnotations(sym, defn.ParamMetaAnnot, NoSymbol, keepIfNoRelevantAnnot = true) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.ParamMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) + else if sym.is(ParamAccessor) then + sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot)) else - removeUnwantedAnnotations(sym, defn.GetterMetaAnnot, defn.FieldMetaAnnot, keepIfNoRelevantAnnot = !sym.is(ParamAccessor)) + sym.keepAnnotationsCarrying(thisPhase, 
Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) if sym.isScala2Macro && !ctx.settings.XignoreScala2Macros.value then if !sym.owner.unforcedDecls.exists(p => !p.isScala2Macro && p.name == sym.name && p.signature == sym.signature) // Allow scala.reflect.materializeClassTag to be able to compile scala/reflect/package.scala @@ -182,17 +192,6 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase => Checking.checkAppliedTypesIn(tree) case _ => - private def removeUnwantedAnnotations(sym: Symbol, metaAnnotSym: Symbol, - metaAnnotSymBackup: Symbol, keepIfNoRelevantAnnot: Boolean)(using Context): Unit = - def shouldKeep(annot: Annotation): Boolean = - val annotSym = annot.symbol - annotSym.hasAnnotation(metaAnnotSym) - || annotSym.hasAnnotation(metaAnnotSymBackup) - || (keepIfNoRelevantAnnot && { - !annotSym.annotations.exists(metaAnnot => defn.FieldAccessorMetaAnnots.contains(metaAnnot.symbol)) - }) - if sym.annotations.nonEmpty then - sym.filterAnnotations(shouldKeep(_)) private def transformSelect(tree: Select, targs: List[Tree])(using Context): Tree = { val qual = tree.qualifier @@ -268,7 +267,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase def checkNotPackage(tree: Tree)(using Context): Tree = if !tree.symbol.is(Package) then tree - else errorTree(tree, i"${tree.symbol} cannot be used as a type") + else errorTree(tree, em"${tree.symbol} cannot be used as a type") override def transform(tree: Tree)(using Context): Tree = try tree match { @@ -276,7 +275,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case CaseDef(pat, _, _) => val gadtCtx = pat.removeAttachment(typer.Typer.InferredGadtConstraints) match - case Some(gadt) => ctx.fresh.setGadt(gadt) + case Some(gadt) => ctx.fresh.setGadtState(GadtState(gadt)) case None => ctx super.transform(tree)(using gadtCtx) @@ -301,19 +300,23 @@ class PostTyper extends MacroTransform with 
IdentityDenotTransformer { thisPhase checkNoConstructorProxy(tree) transformSelect(tree, Nil) case tree: Apply => - val methType = tree.fun.tpe.widen + val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] val app = - if (methType.isErasedMethod) + if (methType.hasErasedParams) tpd.cpy.Apply(tree)( tree.fun, - tree.args.mapConserve(arg => - if (methType.isImplicitMethod && arg.span.isSynthetic) - arg match - case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) => - dropInlines.transform(arg) - case _ => - PruneErasedDefs.trivialErasedTree(arg) - else dropInlines.transform(arg))) + tree.args.zip(methType.erasedParams).map((arg, isErased) => + if !isErased then arg + else + if methType.isResultDependent then + Checking.checkRealizable(arg.tpe, arg.srcPos, "erased argument") + if (methType.isImplicitMethod && arg.span.isSynthetic) + arg match + case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) => + dropInlines.transform(arg) + case _ => + PruneErasedDefs.trivialErasedTree(arg) + else dropInlines.transform(arg))) else tree def app1 = @@ -330,7 +333,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase // Check the constructor type as well; it could be an illegal singleton type // which would not be reflected as `tree.tpe` ctx.typer.checkClassType(nu.tpe, tree.srcPos, traitReq = false, stablePrefixReq = false) - Checking.checkInstantiable(tree.tpe, nu.srcPos) + Checking.checkInstantiable(tree.tpe, nu.tpe, nu.srcPos) withNoCheckNews(nu :: Nil)(app1) case _ => app1 @@ -359,6 +362,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase } case Inlined(call, bindings, expansion) if !call.isEmpty => val pos = call.sourcePos + CrossVersionChecks.checkExperimentalRef(call.symbol, pos) val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(call))) case 
templ: Template => @@ -371,33 +375,47 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase ) } case tree: ValDef => + registerIfHasMacroAnnotations(tree) checkErasedDef(tree) val tree1 = cpy.ValDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) if tree1.removeAttachment(desugar.UntupledParam).isDefined then checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) case tree: DefDef => + registerIfHasMacroAnnotations(tree) checkErasedDef(tree) annotateContextResults(tree) val tree1 = cpy.DefDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) processValOrDefDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef])) case tree: TypeDef => + registerIfHasMacroAnnotations(tree) val sym = tree.symbol if (sym.isClass) VarianceChecker.check(tree) annotateExperimental(sym) + checkMacroAnnotation(sym) + if sym.isOneOf(GivenOrImplicit) then + sym.keepAnnotationsCarrying(thisPhase, Set(defn.CompanionClassMetaAnnot), orNoneOf = defn.MetaAnnots) tree.rhs match case impl: Template => for parent <- impl.parents do Checking.checkTraitInheritance(parent.tpe.classSymbol, sym.asClass, parent.srcPos) + // Constructor parameters are in scope when typing a parent. + // While they can safely appear in a parent tree, to preserve + // soundness we need to ensure they don't appear in a parent + // type (#16270). 
+ val illegalRefs = parent.tpe.namedPartsWith(p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym)) + if illegalRefs.nonEmpty then + report.error( + em"The type of a class parent cannot refer to constructor parameters, but ${parent.tpe} refers to ${illegalRefs.map(_.name.show).mkString(",")}", parent.srcPos) // Add SourceFile annotation to top-level classes if sym.owner.is(Package) then if ctx.compilationUnit.source.exists && sym != defn.SourceFileAnnot then val reference = ctx.settings.sourceroot.value val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) - sym.addAnnotation(Annotation.makeSourceFile(relativePath)) - if ctx.settings.Ycc.value && sym != defn.CaptureCheckedAnnot then - sym.addAnnotation(Annotation(defn.CaptureCheckedAnnot)) + sym.addAnnotation(Annotation.makeSourceFile(relativePath, tree.span)) + if Feature.pureFunsEnabled && sym != defn.WithPureFunsAnnot then + sym.addAnnotation(Annotation(defn.WithPureFunsAnnot, tree.span)) else if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then Checking.checkGoodBounds(tree.symbol) @@ -413,7 +431,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase Checking.checkGoodBounds(tree.symbol) super.transform(tree) case tree: New if isCheckable(tree) => - Checking.checkInstantiable(tree.tpe, tree.srcPos) + Checking.checkInstantiable(tree.tpe, tree.tpe, tree.srcPos) super.transform(tree) case tree: Closure if !tree.tpt.isEmpty => Checking.checkRealizable(tree.tpt.tpe, tree.srcPos, "SAM type") @@ -433,6 +451,13 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case SingletonTypeTree(ref) => Checking.checkRealizable(ref.tpe, ref.srcPos) super.transform(tree) + case tree: TypeBoundsTree => + val TypeBoundsTree(lo, hi, alias) = tree + if !alias.isEmpty then + val bounds = TypeBounds(lo.tpe, hi.tpe) + if !bounds.contains(alias.tpe) then + report.error(em"type ${alias.tpe} outside bounds $bounds", tree.srcPos) + 
super.transform(tree) case tree: TypeTree => tree.withType( tree.tpe match { @@ -479,6 +504,16 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase private def normalizeErasedRhs(rhs: Tree, sym: Symbol)(using Context) = if (sym.isEffectivelyErased) dropInlines.transform(rhs) else rhs + /** Check if the definition has macro annotation and sets `compilationUnit.hasMacroAnnotations` if needed. */ + private def registerIfHasMacroAnnotations(tree: DefTree)(using Context) = + if !Inlines.inInlineMethod && MacroAnnotations.hasMacroAnnotation(tree.symbol) then + ctx.compilationUnit.hasMacroAnnotations = true + + /** Check macro annotations implementations */ + private def checkMacroAnnotation(sym: Symbol)(using Context) = + if sym.derivesFrom(defn.MacroAnnotationClass) && !sym.isStatic then + report.error("classes that extend MacroAnnotation must not be inner/local classes", sym.srcPos) + private def checkErasedDef(tree: ValOrDefDef)(using Context): Unit = if tree.symbol.is(Erased, butNot = Macro) then val tpe = tree.rhs.tpe @@ -489,8 +524,8 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase private def annotateExperimental(sym: Symbol)(using Context): Unit = if sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot) then - sym.addAnnotation(defn.ExperimentalAnnot) - sym.companionModule.addAnnotation(defn.ExperimentalAnnot) + sym.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) + sym.companionModule.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) } } diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 98e835293303..6d8f7bdb32cb 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -70,7 +70,7 @@ class ProtectedAccessors extends MiniPhase { override def ifNoHost(reference: 
RefTree)(using Context): Tree = { val curCls = ctx.owner.enclosingClass transforms.println(i"${curCls.ownersIterator.toList}%, %") - report.error(i"illegal access to protected ${reference.symbol.showLocated} from $curCls", + report.error(em"illegal access to protected ${reference.symbol.showLocated} from $curCls", reference.srcPos) reference } diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala index 568512207fde..17f2d11ccfec 100644 --- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala @@ -13,6 +13,7 @@ import ast.tpd import SymUtils._ import config.Feature import Decorators.* +import dotty.tools.dotc.core.Types.MethodType /** This phase makes all erased term members of classes private so that they cannot * conflict with non-erased members. This is needed so that subsequent phases like @@ -38,8 +39,11 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => else sym.copySymDenotation(initFlags = sym.flags | Private) override def transformApply(tree: Apply)(using Context): Tree = - if !tree.fun.tpe.widen.isErasedMethod then tree - else cpy.Apply(tree)(tree.fun, tree.args.map(trivialErasedTree)) + tree.fun.tpe.widen match + case mt: MethodType if mt.hasErasedParams => + cpy.Apply(tree)(tree.fun, tree.args.zip(mt.erasedParams).map((a, e) => if e then trivialErasedTree(a) else a)) + case _ => + tree override def transformValDef(tree: ValDef)(using Context): Tree = checkErasedInExperimental(tree.symbol) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 36044e6bcb91..c524bbb7702f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -4,7 +4,7 @@ package transform import core.* import Symbols.*, Contexts.*, Types.*, 
ContextOps.*, Decorators.*, SymDenotations.* -import Flags.*, SymUtils.*, NameKinds.* +import Flags.*, SymUtils.*, NameKinds.*, Denotations.Denotation import ast.* import Names.Name import Phases.Phase @@ -21,9 +21,11 @@ import util.Property import StdNames.nme import reporting.trace import annotation.constructorOnly +import cc.CaptureSet.IdempotentCaptRefMap +import dotty.tools.dotc.core.Denotations.SingleDenotation object Recheck: - import tpd.Tree + import tpd.* /** A flag used to indicate that a ParamAccessor has been temporarily made not-private * Only used at the start of the Recheck phase, reset at its end. @@ -36,6 +38,13 @@ object Recheck: /** Attachment key for rechecked types of TypeTrees */ val RecheckedType = Property.Key[Type] + val addRecheckedTypes = new TreeMap: + override def transform(tree: Tree)(using Context): Tree = + val tree1 = super.transform(tree) + tree.getAttachment(RecheckedType) match + case Some(tpe) => tree1.withType(tpe) + case None => tree1 + extension (sym: Symbol) /** Update symbol's info to newInfo from prevPhase.next to lastPhase. @@ -63,7 +72,7 @@ object Recheck: val symd = sym.denot symd.validFor.firstPhaseId == phase.id + 1 && (sym.originDenotation ne symd) - extension (tree: Tree) + extension [T <: Tree](tree: T) /** Remember `tpe` as the type of `tree`, which might be different from the * type stored in the tree itself, unless a type was already remembered for `tree`. 
@@ -78,11 +87,27 @@ object Recheck: if tpe ne tree.tpe then tree.putAttachment(RecheckedType, tpe) /** The remembered type of the tree, or if none was installed, the original type */ - def knownType = + def knownType: Type = tree.attachmentOrElse(RecheckedType, tree.tpe) def hasRememberedType: Boolean = tree.hasAttachment(RecheckedType) + def withKnownType(using Context): T = tree.getAttachment(RecheckedType) match + case Some(tpe) => tree.withType(tpe).asInstanceOf[T] + case None => tree + + extension (tpe: Type) + + /** Map ExprType => T to () ?=> T (and analogously for pure versions). + * Even though this phase runs after ElimByName, ExprTypes can still occur + * as by-name arguments of applied types. See note in doc comment for + * ElimByName phase. Test case is bynamefun.scala. + */ + def mapExprType(using Context): Type = tpe match + case ExprType(rt) => defn.ByNameFunction(rt) + case _ => tpe + + /** A base class that runs a simplified typer pass over an already re-typed program. The pass * does not transform trees but returns instead the re-typed type of each tree as it is * traversed. The Recheck phase must be directly preceded by a phase of type PreRecheck. 
@@ -108,7 +133,9 @@ abstract class Recheck extends Phase, SymTransformer: else sym def run(using Context): Unit = - newRechecker().checkUnit(ctx.compilationUnit) + val rechecker = newRechecker() + rechecker.checkUnit(ctx.compilationUnit) + rechecker.reset() def newRechecker()(using Context): Rechecker @@ -128,8 +155,14 @@ abstract class Recheck extends Phase, SymTransformer: */ def keepType(tree: Tree): Boolean = keepAllTypes + private val prevSelDenots = util.HashMap[NamedType, Denotation]() + + def reset()(using Context): Unit = + for (ref, mbr) <- prevSelDenots.iterator do + ref.withDenot(mbr) + /** Constant-folded rechecked type `tp` of tree `tree` */ - private def constFold(tree: Tree, tp: Type)(using Context): Type = + protected def constFold(tree: Tree, tp: Type)(using Context): Type = val tree1 = tree.withType(tp) val tree2 = ConstFold(tree1) if tree2 ne tree1 then tree2.tpe else tp @@ -137,21 +170,51 @@ abstract class Recheck extends Phase, SymTransformer: def recheckIdent(tree: Ident)(using Context): Type = tree.tpe - def recheckSelect(tree: Select)(using Context): Type = + def recheckSelect(tree: Select, pt: Type)(using Context): Type = val Select(qual, name) = tree - recheckSelection(tree, recheck(qual).widenIfUnstable, name) + val proto = + if tree.symbol == defn.Any_asInstanceOf then WildcardType + else AnySelectionProto + recheckSelection(tree, recheck(qual, proto).widenIfUnstable, name, pt) + + /** When we select the `apply` of a function with type such as `(=> A) => B`, + * we need to convert the parameter type `=> A` to `() ?=> A`. See doc comment + * of `mapExprType`. 
+ */ + def normalizeByName(mbr: SingleDenotation)(using Context): SingleDenotation = mbr.info match + case mt: MethodType if mt.paramInfos.exists(_.isInstanceOf[ExprType]) => + mbr.derivedSingleDenotation(mbr.symbol, + mt.derivedLambdaType(paramInfos = mt.paramInfos.map(_.mapExprType))) + case _ => + mbr - /** Keep the symbol of the `select` but re-infer its type */ - def recheckSelection(tree: Select, qualType: Type, name: Name)(using Context) = + def recheckSelection(tree: Select, qualType: Type, name: Name, + sharpen: Denotation => Denotation)(using Context): Type = if name.is(OuterSelectName) then tree.tpe else //val pre = ta.maybeSkolemizePrefix(qualType, name) - val mbr = qualType.findMember(name, qualType, - excluded = if tree.symbol.is(Private) then EmptyFlags else Private - ).suchThat(tree.symbol == _) - constFold(tree, qualType.select(name, mbr)) + val mbr = normalizeByName( + sharpen( + qualType.findMember(name, qualType, + excluded = if tree.symbol.is(Private) then EmptyFlags else Private + )).suchThat(tree.symbol == _)) + val newType = tree.tpe match + case prevType: NamedType => + val prevDenot = prevType.denot + val newType = qualType.select(name, mbr) + if (newType eq prevType) && (mbr.info ne prevDenot.info) && !prevSelDenots.contains(prevType) then + prevSelDenots(prevType) = prevDenot + newType + case _ => + qualType.select(name, mbr) + constFold(tree, newType) //.showing(i"recheck select $qualType . 
$name : ${mbr.info} = $result") + + /** Keep the symbol of the `select` but re-infer its type */ + def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context): Type = + recheckSelection(tree, qualType, name, sharpen = identity[Denotation]) + def recheckBind(tree: Bind, pt: Type)(using Context): Type = tree match case Bind(name, body) => recheck(body, pt) @@ -187,7 +250,7 @@ abstract class Recheck extends Phase, SymTransformer: * to FromJavaObject since it got lost in ElimRepeated */ private def mapJavaArgs(formals: List[Type])(using Context): List[Type] = - val tm = new TypeMap: + val tm = new TypeMap with IdempotentCaptRefMap: def apply(t: Type) = t match case t: TypeRef if t.symbol == defn.ObjectClass => defn.FromJavaObjectType case _ => mapOver(t) @@ -198,7 +261,10 @@ abstract class Recheck extends Phase, SymTransformer: mt.instantiate(argTypes) def recheckApply(tree: Apply, pt: Type)(using Context): Type = - recheck(tree.fun).widen match + val funTp = recheck(tree.fun) + // reuse the tree's type on signature polymorphic methods, instead of using the (wrong) rechecked one + val funtpe = if tree.fun.symbol.originalSignaturePolymorphic.exists then tree.fun.tpe else funTp + funtpe.widen match case fntpe: MethodType => assert(fntpe.paramInfos.hasSameLengthAs(tree.args)) val formals = @@ -206,7 +272,7 @@ abstract class Recheck extends Phase, SymTransformer: else fntpe.paramInfos def recheckArgs(args: List[Tree], formals: List[Type], prefs: List[ParamRef]): List[Type] = args match case arg :: args1 => - val argType = recheck(arg, formals.head) + val argType = recheck(arg, formals.head.mapExprType) val formals1 = if fntpe.isParamDependent then formals.tail.map(_.substParam(prefs.head, argType)) @@ -218,6 +284,8 @@ abstract class Recheck extends Phase, SymTransformer: val argTypes = recheckArgs(tree.args, formals, fntpe.paramRefs) constFold(tree, instantiate(fntpe, argTypes, tree.fun.symbol)) //.showing(i"typed app $tree : $fntpe with 
${tree.args}%, % : $argTypes%, % = $result") + case tp => + assert(false, i"unexpected type of ${tree.fun}: $funtpe") def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = recheck(tree.fun).widen match @@ -248,7 +316,7 @@ abstract class Recheck extends Phase, SymTransformer: recheckBlock(tree.stats, tree.expr, pt) def recheckInlined(tree: Inlined, pt: Type)(using Context): Type = - recheckBlock(tree.bindings, tree.expansion, pt) + recheckBlock(tree.bindings, tree.expansion, pt)(using inlineContext(tree.call)) def recheckIf(tree: If, pt: Type)(using Context): Type = recheck(tree.cond, defn.BooleanType) @@ -283,7 +351,20 @@ abstract class Recheck extends Phase, SymTransformer: val rawType = recheck(tree.expr) val ownType = avoidMap(rawType) - checkConforms(ownType, tree.from.symbol.returnProto, tree) + + // The pattern matching translation, which runs before this phase + // sometimes instantiates return types with singleton type alternatives + // but the returned expression is widened. We compensate by widening the expected + // type as well. See also `widenSkolems` in `checkConformsExpr` which fixes + // a more general problem. It turns out that pattern matching returns + // are not checked by Ycheck, that's why these problems were allowed to slip + // through. 
+ def widened(tp: Type): Type = tp match + case tp: SingletonType => tp.widen + case tp: AndOrType => tp.derivedAndOrType(widened(tp.tp1), widened(tp.tp2)) + case tp @ AnnotatedType(tp1, ann) => tp.derivedAnnotatedType(widened(tp1), ann) + case _ => tp + checkConforms(ownType, widened(tree.from.symbol.returnProto), tree) defn.NothingType end recheckReturn @@ -344,7 +425,7 @@ abstract class Recheck extends Phase, SymTransformer: val sym = tree.symbol tree match case tree: Ident => recheckIdent(tree) - case tree: Select => recheckSelect(tree) + case tree: Select => recheckSelect(tree, pt) case tree: Bind => recheckBind(tree, pt) case tree: ValOrDefDef => if tree.isEmpty then NoType @@ -409,6 +490,27 @@ abstract class Recheck extends Phase, SymTransformer: throw ex } + /** Typing and previous transforms sometimes leaves skolem types in prefixes of + * NamedTypes in `expected` that do not match the `actual` Type. -Ycheck does + * not complain (need to find out why), but a full recheck does. We compensate + * by de-skolemizing everywhere in `expected` except when variance is negative. + * @return If `tp` contains SkolemTypes in covariant or invariant positions, + * the type where these SkolemTypes are mapped to their underlying type. + * Otherwise, `tp` itself + */ + def widenSkolems(tp: Type)(using Context): Type = + object widenSkolems extends TypeMap, IdempotentCaptRefMap: + var didWiden: Boolean = false + def apply(t: Type): Type = t match + case t: SkolemType if variance >= 0 => + didWiden = true + apply(t.underlying) + case t: LazyRef => t + case t @ AnnotatedType(t1, ann) => t.derivedAnnotatedType(apply(t1), ann) + case _ => mapOver(t) + val tp1 = widenSkolems(tp) + if widenSkolems.didWiden then tp1 else tp + /** If true, print info for some successful checkConforms operations (failing ones give * an error message in any case).
*/ @@ -424,11 +526,16 @@ abstract class Recheck extends Phase, SymTransformer: def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = //println(i"check conforms $actual <:< $expected") - val isCompatible = + + def isCompatible(expected: Type): Boolean = actual <:< expected || expected.isRepeatedParam - && actual <:< expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass)) - if !isCompatible then + && isCompatible(expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass))) + || { + val widened = widenSkolems(expected) + (widened ne expected) && isCompatible(widened) + } + if !isCompatible(expected) then recheckr.println(i"conforms failed for ${tree}: $actual vs $expected") err.typeMismatch(tree.withType(actual), expected) else if debugSuccesses then @@ -436,6 +543,7 @@ abstract class Recheck extends Phase, SymTransformer: case _: Ident => println(i"SUCCESS $tree:\n${TypeComparer.explained(_.isSubType(actual, expected))}") case _ => + end checkConformsExpr def checkUnit(unit: CompilationUnit)(using Context): Unit = recheck(unit.tpdTree) @@ -444,12 +552,6 @@ abstract class Recheck extends Phase, SymTransformer: /** Show tree with rechecked types instead of the types stored in the `.tpe` field */ override def show(tree: untpd.Tree)(using Context): String = - val addRecheckedTypes = new TreeMap: - override def transform(tree: Tree)(using Context): Tree = - val tree1 = super.transform(tree) - tree.getAttachment(RecheckedType) match - case Some(tpe) => tree1.withType(tpe) - case None => tree1 atPhase(thisPhase) { super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) } diff --git a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala index e462f82b1dad..b2059195b8e4 100644 --- a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala @@ -17,7 +17,6 @@ 
import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.Names._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import scala.annotation.constructorOnly diff --git a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala index e8f8a80e1a0d..d6c11fe36748 100644 --- a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala @@ -10,6 +10,7 @@ import Symbols.defn import Constants._ import Types._ import Decorators._ +import Flags._ import scala.collection.mutable @@ -33,7 +34,7 @@ class RepeatableAnnotations extends MiniPhase: val annsByType = stableGroupBy(annotations, _.symbol) annsByType.flatMap { case (_, a :: Nil) => a :: Nil - case (sym, anns) if sym.derivesFrom(defn.ClassfileAnnotationClass) => + case (sym, anns) if sym.is(JavaDefined) => sym.getAnnotation(defn.JavaRepeatableAnnot).flatMap(_.argumentConstant(0)) match case Some(Constant(containerTpe: Type)) => val clashingAnns = annsByType.getOrElse(containerTpe.classSymbol, Nil) @@ -44,7 +45,7 @@ class RepeatableAnnotations extends MiniPhase: Nil else val aggregated = JavaSeqLiteral(anns.map(_.tree).toList, TypeTree(sym.typeRef)) - Annotation(containerTpe, NamedArg("value".toTermName, aggregated)) :: Nil + Annotation(containerTpe, NamedArg("value".toTermName, aggregated), sym.span) :: Nil case _ => val pos = anns.head.tree.srcPos report.error("Not repeatable annotation repeated", pos) diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala index dd109ce153eb..99b6be1eea8a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala +++ b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala @@ -119,6 +119,9 @@ object ResolveSuper { 
report.error(IllegalSuperAccessor(base, memberName, targetName, acc, accTp, other.symbol, otherTp), base.srcPos) bcs = bcs.tail } + if sym.is(Accessor) then + report.error( + em"parent ${acc.owner} has a super call which binds to the value ${sym.showFullName}. Super calls can only target methods.", base) sym.orElse { val originalName = acc.name.asTermName.originalOfSuperAccessorName report.error(em"Member method ${originalName.debugString} of mixin ${acc.owner} is missing a concrete super implementation in $base.", base.srcPos) diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala index c1f891d6293a..2248fbc8d570 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala @@ -70,7 +70,7 @@ class SpecializeFunctions extends MiniPhase { /** Dispatch to specialized `apply`s in user code when available */ override def transformApply(tree: Apply)(using Context) = tree match { - case Apply(fun: NameTree, args) if fun.name == nme.apply && args.size <= 3 && fun.symbol.owner.isType => + case Apply(fun: NameTree, args) if fun.name == nme.apply && args.size <= 3 && fun.symbol.maybeOwner.isType => val argTypes = fun.tpe.widen.firstParamTypes.map(_.widenSingleton.dealias) val retType = tree.tpe.widenSingleton.dealias val isSpecializable = diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 31c28d7b1854..bc4119ad0cff 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -19,6 +19,8 @@ import dotty.tools.dotc.core.Denotations.staticRef import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.quoted.Interpreter + import scala.util.control.NonFatal import dotty.tools.dotc.util.SrcPos import 
dotty.tools.repl.AbstractFileClassLoader @@ -32,7 +34,8 @@ import scala.quoted.runtime.impl._ /** Utility class to splice quoted expressions */ object Splicer { - import tpd._ + import tpd.* + import Interpreter.* /** Splice the Tree for a Quoted expression. `${'{xyz}}` becomes `xyz` * and for `$xyz` the tree of `xyz` is interpreted for which the @@ -50,7 +53,7 @@ object Splicer { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) try { - val interpreter = new Interpreter(splicePos, classLoader) + val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree val interpretedExpr = interpreter.interpret[Quotes => scala.quoted.Expr[Any]](tree) @@ -66,7 +69,7 @@ object Splicer { throw ex case ex: scala.quoted.runtime.StopMacroExpansion => if !ctx.reporter.hasErrors then - report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users to facilitate debugging when aborting a macro expansion.", splicePos) + report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", splicePos) // errors have been emitted EmptyTree case ex: StopInterpretation => @@ -74,16 +77,16 @@ object Splicer { ref(defn.Predef_undefined).withType(ErrorType(ex.msg)) case NonFatal(ex) => val msg = - s"""Failed to evaluate macro. - | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage} - | ${ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.Splicer$").drop(1).mkString("\n ")} - """.stripMargin + em"""Failed to evaluate macro. 
+ | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage} + | ${ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.Splicer$").drop(1).mkString("\n ")} + """ report.error(msg, spliceExpansionPos) ref(defn.Predef_undefined).withType(ErrorType(msg)) } } - /** Checks that no symbol that whas generated within the macro expansion has an out of scope reference */ + /** Checks that no symbol that was generated within the macro expansion has an out of scope reference */ def checkEscapedVariables(tree: Tree, expansionOwner: Symbol)(using Context): tree.type = new TreeTraverser { private[this] var locals = Set.empty[Symbol] @@ -116,7 +119,10 @@ object Splicer { sym.exists && !sym.is(Package) && sym.owner.ownersIterator.exists(x => x == expansionOwner || // symbol was generated within this macro expansion - x.is(Macro, butNot = Method) && x.name == nme.MACROkw // symbol was generated within another macro expansion + { // symbol was generated within another macro expansion + isMacroOwner(x) && + !ctx.owner.ownersIterator.contains(x) + } ) && !locals.contains(sym) // symbol is not in current scope }.traverse(tree) @@ -219,24 +225,21 @@ object Splicer { checkIfValidStaticCall(tree)(using Set.empty) } - /** Tree interpreter that evaluates the tree */ - private class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) { - - type Env = Map[Symbol, Object] - - /** Returns the interpreted result of interpreting the code a call to the symbol with default arguments. - * Return Some of the result or None if some error happen during the interpretation. - */ - def interpret[T](tree: Tree)(implicit ct: ClassTag[T]): Option[T] = - interpretTree(tree)(Map.empty) match { - case obj: T => Some(obj) - case obj => - // TODO upgrade to a full type tag check or something similar - report.error(s"Interpreted tree returned a result of an unexpected type. 
Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) - None - } + /** Is this the dummy owner of a macro expansion */ + def isMacroOwner(sym: Symbol)(using Context): Boolean = + sym.is(Macro, butNot = Method) && sym.name == nme.MACROkw - def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + /** Is this the dummy owner of a macro expansion */ + def inMacroExpansion(using Context) = + ctx.owner.ownersIterator.exists(isMacroOwner) + + /** Tree interpreter that evaluates the tree. + * Interpreter is assumed to start at quotation level -1. + */ + private class SpliceInterpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) extends Interpreter(pos, classLoader) { + + override protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + // Interpret level -1 quoted code `'{...}` (assumed without level 0 splices) case Apply(Select(Apply(TypeApply(fn, _), quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => val quoted1 = quoted match { case quoted: Ident if quoted.symbol.isAllOf(InlineByNameProxy) => @@ -245,324 +248,14 @@ object Splicer { case Inlined(EmptyTree, _, quoted) => quoted case _ => quoted } - interpretQuote(quoted1) + new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(quoted1, ctx.owner)).withSpan(quoted1.span), SpliceScope.getCurrent) + // Interpret level -1 `Type.of[T]` case Apply(TypeApply(fn, quoted :: Nil), _) if fn.symbol == defn.QuotedTypeModule_of => - interpretTypeQuote(quoted) - - case Literal(Constant(value)) => - interpretLiteral(value) - - case tree: Ident if tree.symbol.is(Inline, butNot = Method) => - tree.tpe.widenTermRefExpr match - case ConstantType(c) => c.value.asInstanceOf[Object] - case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos) - - // TODO disallow interpreted method calls as arguments - case Call(fn, args) => - if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package)) - 
interpretNew(fn.symbol, args.flatten.map(interpretTree)) - else if (fn.symbol.is(Module)) - interpretModuleAccess(fn.symbol) - else if (fn.symbol.is(Method) && fn.symbol.isStatic) { - val staticMethodCall = interpretedStaticMethodCall(fn.symbol.owner, fn.symbol) - staticMethodCall(interpretArgs(args, fn.symbol.info)) - } - else if fn.symbol.isStatic then - assert(args.isEmpty) - interpretedStaticFieldAccess(fn.symbol) - else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) - if (fn.name == nme.asInstanceOfPM) - interpretModuleAccess(fn.qualifier.symbol) - else { - val staticMethodCall = interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol) - staticMethodCall(interpretArgs(args, fn.symbol.info)) - } - else if (env.contains(fn.symbol)) - env(fn.symbol) - else if (tree.symbol.is(InlineProxy)) - interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) - else - unexpectedTree(tree) - - case closureDef((ddef @ DefDef(_, ValDefs(arg :: Nil) :: Nil, _, _))) => - (obj: AnyRef) => interpretTree(ddef.rhs)(using env.updated(arg.symbol, obj)) - - // Interpret `foo(j = x, i = y)` which it is expanded to - // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` - case Block(stats, expr) => interpretBlock(stats, expr) - case NamedArg(_, arg) => interpretTree(arg) - - case Inlined(_, bindings, expansion) => interpretBlock(bindings, expansion) - - case Typed(expr, _) => - interpretTree(expr) - - case SeqLiteral(elems, _) => - interpretVarargs(elems.map(e => interpretTree(e))) + new TypeImpl(QuoteUtils.changeOwnerOfTree(quoted, ctx.owner), SpliceScope.getCurrent) case _ => - unexpectedTree(tree) - } - - private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = { - def interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = - assert(args.size == argTypes.size) - val view = - for (arg, info) <- args.lazyZip(argTypes) yield - info match - case _: ExprType => () => interpretTree(arg) // by-name 
argument - case _ => interpretTree(arg) // by-value argument - view.toList - - fnType.dealias match - case fnType: MethodType if fnType.isErasedMethod => interpretArgs(argss, fnType.resType) - case fnType: MethodType => - val argTypes = fnType.paramInfos - assert(argss.head.size == argTypes.size) - interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) - case fnType: AppliedType if defn.isContextFunctionType(fnType) => - val argTypes :+ resType = fnType.args: @unchecked - interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) - case fnType: PolyType => interpretArgs(argss, fnType.resType) - case fnType: ExprType => interpretArgs(argss, fnType.resType) - case _ => - assert(argss.isEmpty) - Nil - } - - private def interpretBlock(stats: List[Tree], expr: Tree)(implicit env: Env) = { - var unexpected: Option[Object] = None - val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match { - case stat: ValDef => - accEnv.updated(stat.symbol, interpretTree(stat.rhs)(accEnv)) - case stat => - if (unexpected.isEmpty) - unexpected = Some(unexpectedTree(stat)) - accEnv - }) - unexpected.getOrElse(interpretTree(expr)(newEnv)) - } - - private def interpretQuote(tree: Tree)(implicit env: Env): Object = - new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(tree, ctx.owner)).withSpan(tree.span), SpliceScope.getCurrent) - - private def interpretTypeQuote(tree: Tree)(implicit env: Env): Object = - new TypeImpl(QuoteUtils.changeOwnerOfTree(tree, ctx.owner), SpliceScope.getCurrent) - - private def interpretLiteral(value: Any)(implicit env: Env): Object = - value.asInstanceOf[Object] - - private def interpretVarargs(args: List[Object])(implicit env: Env): Object = - args.toSeq - - private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol)(implicit env: Env): List[Object] => Object = { - val (inst, clazz) = - try - if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) - (null, 
loadReplLineClass(moduleClass)) - else { - val inst = loadModule(moduleClass) - (inst, inst.getClass) - } - catch - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - - val name = fn.name.asTermName - val method = getMethod(clazz, name, paramsSig(fn)) - (args: List[Object]) => stopIfRuntimeException(method.invoke(inst, args: _*), method) - } - - private def interpretedStaticFieldAccess(sym: Symbol)(implicit env: Env): Object = { - val clazz = loadClass(sym.owner.fullName.toString) - val field = clazz.getField(sym.name.toString) - field.get(null) - } - - private def interpretModuleAccess(fn: Symbol)(implicit env: Env): Object = - loadModule(fn.moduleClass) - - private def interpretNew(fn: Symbol, args: => List[Object])(implicit env: Env): Object = { - val clazz = loadClass(fn.owner.fullName.toString) - val constr = clazz.getConstructor(paramsSig(fn): _*) - constr.newInstance(args: _*).asInstanceOf[Object] - } - - private def unexpectedTree(tree: Tree)(implicit env: Env): Object = - throw new StopInterpretation("Unexpected tree could not be interpreted: " + tree, tree.srcPos) - - private def loadModule(sym: Symbol): Object = - if (sym.owner.is(Package)) { - // is top level object - val moduleClass = loadClass(sym.fullName.toString) - moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) - } - else { - // nested object in an object - val className = { - val pack = sym.topLevelClass.owner - if (pack == defn.RootPackage || pack == defn.EmptyPackageClass) sym.flatName.toString - else pack.showFullName + "." + sym.flatName - } - val clazz = loadClass(className) - clazz.getConstructor().newInstance().asInstanceOf[Object] - } - - private def loadReplLineClass(moduleClass: Symbol)(implicit env: Env): Class[?] 
= { - val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) - lineClassloader.loadClass(moduleClass.name.firstPart.toString) - } - - private def loadClass(name: String): Class[?] = - try classLoader.loadClass(name) - catch { - case _: ClassNotFoundException => - val msg = s"Could not find class $name in classpath" - throw new StopInterpretation(msg, pos) - } - - private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = - try clazz.getMethod(name.toString, paramClasses: _*) - catch { - case _: NoSuchMethodException => - val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" - throw new StopInterpretation(msg, pos) - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - } - - private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = - try thunk - catch { - case ex: RuntimeException => - val sw = new StringWriter() - sw.write("A runtime exception occurred while executing macro expansion\n") - sw.write(ex.getMessage) - sw.write("\n") - ex.printStackTrace(new PrintWriter(sw)) - sw.write("\n") - throw new StopInterpretation(sw.toString, pos) - case ex: InvocationTargetException => - ex.getTargetException match { - case ex: scala.quoted.runtime.StopMacroExpansion => - throw ex - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - case targetException => - val sw = new StringWriter() - sw.write("Exception occurred while executing macro expansion.\n") - if (!ctx.settings.Ydebug.value) { - val end = 
targetException.getStackTrace.lastIndexWhere { x => - x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName - } - val shortStackTrace = targetException.getStackTrace.take(end + 1) - targetException.setStackTrace(shortStackTrace) - } - targetException.printStackTrace(new PrintWriter(sw)) - sw.write("\n") - throw new StopInterpretation(sw.toString, pos) - } - } - - private object MissingClassDefinedInCurrentRun { - def unapply(targetException: NoClassDefFoundError)(using Context): Option[Symbol] = { - val className = targetException.getMessage - if (className == null) None - else { - val sym = staticRef(className.toTypeName).symbol - if (sym.isDefinedInCurrentRun) Some(sym) else None - } - } - } - - /** List of classes of the parameters of the signature of `sym` */ - private def paramsSig(sym: Symbol): List[Class[?]] = { - def paramClass(param: Type): Class[?] = { - def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { - case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) - case _ => (tpe, depth) - } - def javaArraySig(tpe: Type): String = { - val (elemType, depth) = arrayDepth(tpe, 0) - val sym = elemType.classSymbol - val suffix = - if (sym == defn.BooleanClass) "Z" - else if (sym == defn.ByteClass) "B" - else if (sym == defn.ShortClass) "S" - else if (sym == defn.IntClass) "I" - else if (sym == defn.LongClass) "J" - else if (sym == defn.FloatClass) "F" - else if (sym == defn.DoubleClass) "D" - else if (sym == defn.CharClass) "C" - else "L" + javaSig(elemType) + ";" - ("[" * depth) + suffix - } - def javaSig(tpe: Type): String = tpe match { - case tpe: JavaArrayType => javaArraySig(tpe) - case _ => - // Take the flatten name of the class and the full package name - val pack = tpe.classSymbol.topLevelClass.owner - val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." 
- packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString - } - - val sym = param.classSymbol - if (sym == defn.BooleanClass) classOf[Boolean] - else if (sym == defn.ByteClass) classOf[Byte] - else if (sym == defn.CharClass) classOf[Char] - else if (sym == defn.ShortClass) classOf[Short] - else if (sym == defn.IntClass) classOf[Int] - else if (sym == defn.LongClass) classOf[Long] - else if (sym == defn.FloatClass) classOf[Float] - else if (sym == defn.DoubleClass) classOf[Double] - else java.lang.Class.forName(javaSig(param), false, classLoader) - } - def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { - case tp: AppliedType if defn.isContextFunctionType(tp) => - // Call context function type direct method - tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) - case _ => Nil - } - val extraParams = getExtraParams(sym.info.finalResultType) - val allParams = TypeErasure.erasure(sym.info) match { - case meth: MethodType => meth.paramInfos ::: extraParams - case _ => extraParams - } - allParams.map(paramClass) - } - } - - - - /** Exception that stops interpretation if some issue is found */ - private class StopInterpretation(val msg: String, val pos: SrcPos) extends Exception - - object Call { - /** Matches an expression that is either a field access or an application - * It retruns a TermRef containing field accessed or a method reference and the arguments passed to it. 
- */ - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = - Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) - - private object Call0 { - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { - case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => - Some((fn, args)) - case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) - case fn: Select => Some((fn, Nil)) - case Apply(f @ Call0(fn, args1), args2) => - if (f.tpe.widenDealias.isErasedMethod) Some((fn, args1)) - else Some((fn, args2 :: args1)) - case TypeApply(Call0(fn, args), _) => Some((fn, args)) - case _ => None - } + super.interpretTree(tree) } } } diff --git a/compiler/src/dotty/tools/dotc/transform/Splicing.scala b/compiler/src/dotty/tools/dotc/transform/Splicing.scala index ad3f0322130d..bb82fba32a7c 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicing.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicing.scala @@ -14,15 +14,16 @@ import util.Spans._ import SymUtils._ import NameKinds._ import dotty.tools.dotc.ast.tpd -import StagingContext._ import scala.collection.mutable import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.Names._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import dotty.tools.dotc.config.ScalaRelease.* +import dotty.tools.dotc.staging.QuoteContext.* +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.QuoteTypeTags import scala.annotation.constructorOnly @@ -77,7 +78,7 @@ class Splicing extends MacroTransform: override def run(using Context): Unit = if ctx.compilationUnit.needsStaging then - super.run(using freshStagingContext) + super.run protected def newTransformer(using Context): Transformer = Level0QuoteTransformer @@ -190,7 +191,7 @@ class Splicing extends MacroTransform: private var refBindingMap = 
mutable.Map.empty[Symbol, (Tree, Symbol)] /** Reference to the `Quotes` instance of the current level 1 splice */ private var quotes: Tree | Null = null // TODO: add to the context - private var healedTypes: PCPCheckAndHeal.QuoteTypeTags | Null = null // TODO: add to the context + private var healedTypes: QuoteTypeTags | Null = null // TODO: add to the context def transformSplice(tree: tpd.Tree, tpe: Type, holeIdx: Int)(using Context): tpd.Tree = assert(level == 0) @@ -206,6 +207,14 @@ class Splicing extends MacroTransform: override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match + case tree: Select if tree.isTerm && isCaptured(tree.symbol) => + tree.symbol.allOverriddenSymbols.find(sym => !isCaptured(sym.owner)) match + case Some(sym) => + // virtualize call on overridden symbol that is not defined in a non static class + transform(tree.qualifier.select(sym)) + case _ => + report.error(em"Can not use reference to staged local ${tree.symbol} defined in an outer quote.\n\nThis can work if ${tree.symbol.owner} would extend a top level interface that defines ${tree.symbol}.", tree) + tree case tree: RefTree => if tree.isTerm then if isCaptured(tree.symbol) then @@ -242,9 +251,16 @@ class Splicing extends MacroTransform: else args.mapConserve(arg => transformLevel0QuoteContent(arg)(using quoteContext)) } cpy.Apply(tree)(cpy.Select(sel)(cpy.Apply(app)(fn, newArgs), nme.apply), quotesArgs) - case Apply(TypeApply(_, List(tpt)), List(quotes)) + case Apply(TypeApply(typeof, List(tpt)), List(quotes)) if tree.symbol == defn.QuotedTypeModule_of && containsCapturedType(tpt.tpe) => - ref(capturedType(tpt))(using ctx.withSource(tree.source)).withSpan(tree.span) + val newContent = capturedPartTypes(tpt) + newContent match + case block: Block => + inContext(ctx.withSource(tree.source)) { + Apply(TypeApply(typeof, List(newContent)), List(quotes)).withSpan(tree.span) + } + case _ => + ref(capturedType(newContent))(using 
ctx.withSource(tree.source)).withSpan(tree.span) case CapturedApplication(fn, argss) => transformCapturedApplication(tree, fn, argss) case _ => @@ -253,7 +269,7 @@ class Splicing extends MacroTransform: private def transformLevel0QuoteContent(tree: Tree)(using Context): Tree = // transform and collect new healed types val old = healedTypes - healedTypes = new PCPCheckAndHeal.QuoteTypeTags(tree.span) + healedTypes = new QuoteTypeTags(tree.span) val tree1 = transform(tree) val newHealedTypes = healedTypes.nn.getTypeTags healedTypes = old @@ -335,17 +351,46 @@ class Splicing extends MacroTransform: val bindingSym = refBindingMap.getOrElseUpdate(tree.symbol, (tree, newBinding))._2 ref(bindingSym) - private def capturedType(tree: Tree)(using Context): Symbol = - val tpe = tree.tpe.widenTermRefExpr - def newBinding = newSymbol( + private def newQuotedTypeClassBinding(tpe: Type)(using Context) = + newSymbol( spliceOwner, UniqueName.fresh(nme.Type).toTermName, Param, defn.QuotedTypeClass.typeRef.appliedTo(tpe), ) - val bindingSym = refBindingMap.getOrElseUpdate(tree.symbol, (TypeTree(tree.tpe), newBinding))._2 + + private def capturedType(tree: Tree)(using Context): Symbol = + val tpe = tree.tpe.widenTermRefExpr + val bindingSym = refBindingMap + .getOrElseUpdate(tree.symbol, (TypeTree(tree.tpe), newQuotedTypeClassBinding(tpe)))._2 bindingSym + private def capturedPartTypes(tpt: Tree)(using Context): Tree = + val old = healedTypes + healedTypes = QuoteTypeTags(tpt.span) + val capturePartTypes = new TypeMap { + def apply(tp: Type) = tp match { + case typeRef: TypeRef if containsCapturedType(typeRef) => + val termRef = refBindingMap + .getOrElseUpdate(typeRef.symbol, (TypeTree(typeRef), newQuotedTypeClassBinding(typeRef)))._2.termRef + val tagRef = healedTypes.nn.getTagRef(termRef) + tagRef + case _ => + mapOver(tp) + } + } + val captured = capturePartTypes(tpt.tpe.widenTermRefExpr) + val newHealedTypes = healedTypes.nn.getTypeTags + healedTypes = old + tpt match + case 
block: Block => + cpy.Block(block)(newHealedTypes ::: block.stats, TypeTree(captured)) + case _ => + if newHealedTypes.nonEmpty then + cpy.Block(tpt)(newHealedTypes, TypeTree(captured)) + else + tpt + private def getTagRefFor(tree: Tree)(using Context): Tree = val capturedTypeSym = capturedType(tree) TypeTree(healedTypes.nn.getTagRef(capturedTypeSym.termRef)) diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala index 1de050a9a6c1..83b2bcdbcaa6 100644 --- a/compiler/src/dotty/tools/dotc/transform/Staging.scala +++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala @@ -6,18 +6,18 @@ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Phases._ import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.StagingContext._ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ import dotty.tools.dotc.util.SrcPos import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.transform.TreeMapWithStages._ +import dotty.tools.dotc.staging.QuoteContext.* +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.CrossStageSafety +import dotty.tools.dotc.staging.HealType - - -/** Checks that the Phase Consistency Principle (PCP) holds and heals types. +/** Checks that staging level consistency holds and heals types used in higher levels. * - * Type healing consists in transforming a phase inconsistent type `T` into `${ implicitly[Type[T]] }`. 
+ * See `CrossStageSafety` */ class Staging extends MacroTransform { import tpd._ @@ -32,23 +32,25 @@ class Staging extends MacroTransform { override def checkPostCondition(tree: Tree)(using Context): Unit = if (ctx.phase <= splicingPhase) { - // Recheck that PCP holds but do not heal any inconsistent types as they should already have been heald + // Recheck that staging level consistency holds but do not heal any inconsistent types as they should already have been heald tree match { case PackageDef(pid, _) if tree.symbol.owner == defn.RootClass => - val checker = new PCPCheckAndHeal(freshStagingContext) { - override protected def tryHeal(sym: Symbol, tp: TypeRef, pos: SrcPos)(using Context): TypeRef = { - def symStr = - if (sym.is(ModuleClass)) sym.sourceModule.show - else i"${sym.name}.this" - val errMsg = s"\nin ${ctx.owner.fullName}" - assert( - ctx.owner.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) || - (sym.isType && levelOf(sym) > 0), - em"""access to $symStr from wrong staging level: - | - the definition is at level ${levelOf(sym)}, - | - but the access is at level $level.$errMsg""") + val checker = new CrossStageSafety { + override protected def healType(pos: SrcPos)(using Context) = new HealType(pos) { + override protected def tryHeal(sym: Symbol, tp: TypeRef, pos: SrcPos): TypeRef = { + def symStr = + if (sym.is(ModuleClass)) sym.sourceModule.show + else i"${sym.name}.this" + val errMsg = s"\nin ${ctx.owner.fullName}" + assert( + ctx.owner.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) || + (sym.isType && levelOf(sym) > 0), + em"""access to $symStr from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level $level.$errMsg""") - tp + tp + } } } checker.transform(tree) @@ -66,11 +68,11 @@ class Staging extends MacroTransform { } override def run(using Context): Unit = - if (ctx.compilationUnit.needsStaging) super.run(using freshStagingContext) + if (ctx.compilationUnit.needsStaging) super.run 
protected def newTransformer(using Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = - new PCPCheckAndHeal(ctx).transform(tree) + (new CrossStageSafety).transform(tree) } } diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index b0c8605e7dd1..b78c75d58340 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -88,7 +88,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { // Diagnostic for SI-7091 if (!accDefs.contains(clazz)) report.error( - s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz.is(Package)}. Accessor required for ${sel} (${sel.show})", + em"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz.is(Package)}. Accessor required for ${sel.toString} ($sel)", sel.srcPos) else accDefs(clazz) += DefDef(acc, EmptyTree).withSpan(accRange) acc @@ -109,16 +109,16 @@ class SuperAccessors(thisPhase: DenotTransformer) { if (sym.isTerm && !sym.is(Method, butNot = Accessor) && !ctx.owner.isAllOf(ParamForwarder)) // ParamForwaders as installed ParamForwarding.scala do use super calls to vals - report.error(s"super may be not be used on ${sym.underlyingSymbol}", sel.srcPos) + report.error(em"super may be not be used on ${sym.underlyingSymbol}", sel.srcPos) else if (isDisallowed(sym)) - report.error(s"super not allowed here: use this.${sel.name} instead", sel.srcPos) + report.error(em"super not allowed here: use this.${sel.name} instead", sel.srcPos) else if (sym.is(Deferred)) { val member = sym.overridingSymbol(clazz.asClass) if (!mix.name.isEmpty || !member.exists || !(member.is(AbsOverride) && member.isIncompleteIn(clazz))) report.error( - i"${sym.showLocated} is accessed from super. 
It may not be abstract unless it is overridden by a member declared `abstract' and `override'", + em"${sym.showLocated} is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'", sel.srcPos) else report.log(i"ok super $sel ${sym.showLocated} $member $clazz ${member.isIncompleteIn(clazz)}") } @@ -131,7 +131,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { val overriding = sym.overridingSymbol(intermediateClass) if (overriding.is(Deferred, butNot = AbsOverride) && !overriding.owner.is(Trait)) report.error( - s"${sym.showLocated} cannot be directly accessed from ${clazz} because ${overriding.owner} redeclares it as abstract", + em"${sym.showLocated} cannot be directly accessed from ${clazz} because ${overriding.owner} redeclares it as abstract", sel.srcPos) } else { @@ -174,27 +174,30 @@ class SuperAccessors(thisPhase: DenotTransformer) { val sel @ Select(qual, name) = tree: @unchecked val sym = sel.symbol - /** If an accesses to protected member of a class comes from a trait, - * or would need a protected accessor placed in a trait, we cannot - * perform the access to the protected member directly since jvm access - * restrictions require the call site to be in an actual subclass and - * traits don't count as subclasses in this respect. In this case - * we generate a super accessor instead. See SI-2296. - */ def needsSuperAccessor = ProtectedAccessors.needsAccessorIfNotInSubclass(sym) && AccessProxies.hostForAccessorOf(sym).is(Trait) qual match { case _: This if needsSuperAccessor => - /* - * A trait which extends a class and accesses a protected member - * of that class cannot implement the necessary accessor method - * because jvm access restrictions require the call site to be in - * an actual subclass and traits don't count as subclasses in this - * respect. We generate a super accessor itself, which will be fixed - * by the implementing class. See SI-2296. 
- */ - superAccessorCall(sel) + /* Given a protected member m defined in class C, + * and a trait T that calls m. + * + * If T extends C, then we can access it by casting + * the qualifier of the select to C. + * + * That's because the protected method is actually public, + * so we can call it. For truly protected methods, like from + * Java, we error instead of emitting the wrong code (i17021.ext-java). + * + * Otherwise, we need to go through an accessor, + * which the implementing class will provide an implementation for. + */ + if ctx.owner.enclosingClass.derivesFrom(sym.owner) then + if sym.is(JavaDefined) then + report.error(em"${ctx.owner} accesses protected $sym inside a concrete trait method: use super.${sel.name} instead", sel.srcPos) + sel + else + superAccessorCall(sel) case Super(_, mix) => transformSuperSelect(sel) case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala index 0a6fa9217303..b945f5820523 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala @@ -270,11 +270,8 @@ object SymUtils: def isEnumCase(using Context): Boolean = self.isAllOf(EnumCase, butNot = JavaDefined) - def annotationsCarrying(meta: ClassSymbol)(using Context): List[Annotation] = - self.annotations.filter(_.symbol.hasAnnotation(meta)) - - def withAnnotationsCarrying(from: Symbol, meta: ClassSymbol)(using Context): self.type = { - self.addAnnotations(from.annotationsCarrying(meta)) + def withAnnotationsCarrying(from: Symbol, meta: Symbol, orNoneOf: Set[Symbol] = Set.empty)(using Context): self.type = { + self.addAnnotations(from.annotationsCarrying(Set(meta), orNoneOf)) self } @@ -363,6 +360,8 @@ object SymUtils: self.hasAnnotation(defn.ExperimentalAnnot) || isDefaultArgumentOfExperimentalMethod || (!self.is(Package) && self.owner.isInExperimentalScope) + || self.topLevelClass.ownersIterator.exists(p => + p.is(Package) && 
p.owner.isRoot && p.name == tpnme.dotty) /** The declared self type of this class, as seen from `site`, stripping * all refinements for opaque types. @@ -382,7 +381,7 @@ object SymUtils: if original.hasAnnotation(defn.TargetNameAnnot) then self.addAnnotation( Annotation(defn.TargetNameAnnot, - Literal(Constant(nameFn(original.targetName).toString)).withSpan(original.span))) + Literal(Constant(nameFn(original.targetName).toString)).withSpan(original.span), original.span)) /** The return type as seen from the body of this definition. It is * computed from the symbol's type by replacing param refs by param symbols. diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 0a9a7a83948c..48bcbaab3511 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -13,6 +13,7 @@ import ast.untpd import ValueClasses.isDerivedValueClass import SymUtils._ import util.Property +import util.Spans.Span import config.Printers.derive import NullOpsDecorator._ @@ -155,7 +156,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { case nme.hashCode_ => chooseHashcode case nme.toString_ => toStringBody(vrefss) case nme.equals_ => equalsBody(vrefss.head.head) - case nme.canEqual_ => canEqualBody(vrefss.head.head) + case nme.canEqual_ => canEqualBody(vrefss.head.head, synthetic.span) case nme.ordinal => ordinalRef case nme.productArity => Literal(Constant(accessors.length)) case nme.productPrefix if isEnumValue => nameRef @@ -260,13 +261,13 @@ class SyntheticMembers(thisPhase: DenotTransformer) { def equalsBody(that: Tree)(using Context): Tree = { val thatAsClazz = newSymbol(ctx.owner, nme.x_0, SyntheticCase, clazzType, coord = ctx.owner.span) // x$0 def wildcardAscription(tp: Type) = Typed(Underscore(tp), TypeTree(tp)) - val pattern = Bind(thatAsClazz, wildcardAscription(AnnotatedType(clazzType, 
Annotation(defn.UncheckedAnnot)))) // x$0 @ (_: C @unchecked) + val pattern = Bind(thatAsClazz, wildcardAscription(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot, thatAsClazz.span)))) // x$0 @ (_: C @unchecked) // compare primitive fields first, slow equality checks of non-primitive fields can be skipped when primitives differ val sortedAccessors = accessors.sortBy(accessor => if (accessor.info.typeSymbol.isPrimitiveValueClass) 0 else 1) val comparisons = sortedAccessors.map { accessor => This(clazz).withSpan(ctx.owner.span.focus).select(accessor).equal(ref(thatAsClazz).select(accessor)) } var rhs = // this.x == this$0.x && this.y == x$0.y && that.canEqual(this) - if comparisons.isEmpty then Literal(Constant(true)) else comparisons.reduceLeft(_ and _) + if comparisons.isEmpty then Literal(Constant(true)) else comparisons.reduceBalanced(_ and _) val canEqualMeth = existingDef(defn.Product_canEqual, clazz) if !clazz.is(Final) || canEqualMeth.exists && !canEqualMeth.is(Synthetic) then rhs = rhs.and( @@ -390,7 +391,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * * `@unchecked` is needed for parametric case classes. 
*/ - def canEqualBody(that: Tree): Tree = that.isInstance(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot))) + def canEqualBody(that: Tree, span: Span): Tree = that.isInstance(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot, span))) symbolsToSynthesize.flatMap(syntheticDefIfMissing) } diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 71b66c3d0da6..741b9d1627fe 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -4,7 +4,7 @@ package transform import ast.{TreeTypeMap, tpd} import config.Printers.tailrec import core.* -import Contexts.*, Flags.*, Symbols.* +import Contexts.*, Flags.*, Symbols.*, Decorators.em import Constants.Constant import NameKinds.{TailLabelName, TailLocalName, TailTempName} import StdNames.nme @@ -303,7 +303,7 @@ class TailRec extends MiniPhase { def fail(reason: String) = { if (isMandatory) { failureReported = true - report.error(s"Cannot rewrite recursive call: $reason", tree.srcPos) + report.error(em"Cannot rewrite recursive call: $reason", tree.srcPos) } else tailrec.println("Cannot rewrite recursive call at: " + tree.span + " because: " + reason) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index d1cd4e8729a2..f9240d6091c4 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -42,10 +42,6 @@ class TreeChecker extends Phase with SymTransformer { private val seenClasses = collection.mutable.HashMap[String, Symbol]() private val seenModuleVals = collection.mutable.HashMap[String, Symbol]() - def isValidJVMName(name: Name): Boolean = name.toString.forall(isValidJVMChar) - - def isValidJVMMethodName(name: Name): Boolean = name.toString.forall(isValidJVMMethodChar) - val NoSuperClassFlags: FlagSet = Trait | 
Package def testDuplicate(sym: Symbol, registry: mutable.Map[String, Symbol], typ: String)(using Context): Unit = { @@ -91,7 +87,7 @@ class TreeChecker extends Phase with SymTransformer { if (ctx.phaseId <= erasurePhase.id) { val initial = symd.initial assert(symd == initial || symd.signature == initial.signature, - i"""Signature of ${sym.showLocated} changed at phase ${ctx.phase.prevMega} + i"""Signature of ${sym} in ${sym.ownersIterator.toList}%, % changed at phase ${ctx.phase.prevMega} |Initial info: ${initial.info} |Initial sig : ${initial.signature} |Current info: ${symd.info} @@ -109,18 +105,6 @@ class TreeChecker extends Phase with SymTransformer { else if (ctx.phase.prev.isCheckable) check(ctx.base.allPhases.toIndexedSeq, ctx) - private def previousPhases(phases: List[Phase])(using Context): List[Phase] = phases match { - case (phase: MegaPhase) :: phases1 => - val subPhases = phase.miniPhases - val previousSubPhases = previousPhases(subPhases.toList) - if (previousSubPhases.length == subPhases.length) previousSubPhases ::: previousPhases(phases1) - else previousSubPhases - case phase :: phases1 if phase ne ctx.phase => - phase :: previousPhases(phases1) - case _ => - Nil - } - def check(phasesToRun: Seq[Phase], ctx: Context): Tree = { val fusedPhase = ctx.phase.prevMega(using ctx) report.echo(s"checking ${ctx.compilationUnit} after phase ${fusedPhase}")(using ctx) @@ -134,7 +118,6 @@ class TreeChecker extends Phase with SymTransformer { val checkingCtx = ctx .fresh - .addMode(Mode.ImplicitsEnabled) .setReporter(new ThrowingReporter(ctx.reporter)) val checker = inContext(ctx) { @@ -150,9 +133,80 @@ class TreeChecker extends Phase with SymTransformer { } } + /** + * Checks that `New` nodes are always wrapped inside `Select` nodes. 
+ */ + def assertSelectWrapsNew(tree: Tree)(using Context): Unit = + (new TreeAccumulator[tpd.Tree] { + override def apply(parent: Tree, tree: Tree)(using Context): Tree = { + tree match { + case tree: New if !parent.isInstanceOf[tpd.Select] => + assert(assertion = false, i"`New` node must be wrapped in a `Select` of the constructor:\n parent = ${parent.show}\n child = ${tree.show}") + case _: Annotated => + // Don't check inside annotations, since they're allowed to contain + // somewhat invalid trees. + case _ => + foldOver(tree, tree) // replace the parent when folding over the children + } + parent // return the old parent so that my siblings see it + } + })(tpd.EmptyTree, tree) +} + +object TreeChecker { + /** - Check that TypeParamRefs and MethodParams refer to an enclosing type. + * - Check that all type variables are instantiated. + */ + def checkNoOrphans(tp0: Type, tree: untpd.Tree = untpd.EmptyTree)(using Context): Type = new TypeMap() { + val definedBinders = new java.util.IdentityHashMap[Type, Any] + def apply(tp: Type): Type = { + tp match { + case tp: BindingType => + definedBinders.put(tp, tp) + mapOver(tp) + definedBinders.remove(tp) + case tp: ParamRef => + assert(definedBinders.get(tp.binder) != null, s"orphan param: ${tp.show}, hash of binder = ${System.identityHashCode(tp.binder)}, tree = ${tree.show}, type = $tp0") + case tp: TypeVar => + assert(tp.isInstantiated, s"Uninstantiated type variable: ${tp.show}, tree = ${tree.show}") + apply(tp.underlying) + case _ => + mapOver(tp) + } + tp + } + }.apply(tp0) + + /** Run some additional checks on the nodes of the trees. Specifically: + * + * - TypeTree can only appear in TypeApply args, New, Typed tpt, Closure + * tpt, SeqLiteral elemtpt, ValDef tpt, DefDef tpt, and TypeDef rhs. 
+ */ + object TreeNodeChecker extends untpd.TreeTraverser: + import untpd._ + def traverse(tree: Tree)(using Context) = tree match + case t: TypeTree => assert(assertion = false, i"TypeTree not expected: $t") + case t @ TypeApply(fun, _targs) => traverse(fun) + case t @ New(_tpt) => + case t @ Typed(expr, _tpt) => traverse(expr) + case t @ Closure(env, meth, _tpt) => traverse(env); traverse(meth) + case t @ SeqLiteral(elems, _elemtpt) => traverse(elems) + case t @ ValDef(_, _tpt, _) => traverse(t.rhs) + case t @ DefDef(_, paramss, _tpt, _) => for params <- paramss do traverse(params); traverse(t.rhs) + case t @ TypeDef(_, _rhs) => + case t @ Template(constr, _, self, _) => traverse(constr); traverse(t.parentsOrDerived); traverse(self); traverse(t.body) + case t => traverseChildren(t) + end traverse + + private[TreeChecker] def isValidJVMName(name: Name): Boolean = name.toString.forall(isValidJVMChar) + + private[TreeChecker] def isValidJVMMethodName(name: Name): Boolean = name.toString.forall(isValidJVMMethodChar) + + class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { + import ast.tpd._ - private val nowDefinedSyms = util.HashSet[Symbol]() + protected val nowDefinedSyms = util.HashSet[Symbol]() private val patBoundSyms = util.HashSet[Symbol]() private val everDefinedSyms = MutableSymbolMap[untpd.Tree]() @@ -248,10 +302,9 @@ class TreeChecker extends Phase with SymTransformer { // case tree: untpd.Ident => // case tree: untpd.Select => // case tree: untpd.Bind => - case vd : ValDef => - assertIdentNotJavaClass(vd.forceIfLazy) - case dd : DefDef => - assertIdentNotJavaClass(dd.forceIfLazy) + case md: ValOrDefDef => + md.forceFields() + assertIdentNotJavaClass(md) // case tree: untpd.TypeDef => case Apply(fun, args) => assertIdentNotJavaClass(fun) @@ -305,9 +358,26 @@ class TreeChecker extends Phase with SymTransformer { override def excludeFromDoubleDeclCheck(sym: Symbol)(using Context): Boolean = sym.isEffectivelyErased && sym.is(Private) && 
!sym.initial.is(Private) + /** Check that all invariants related to Super and SuperType are met */ + def checkSuper(tree: Tree)(implicit ctx: Context): Unit = tree match + case Super(qual, mix) => + tree.tpe match + case tp @ SuperType(thistpe, supertpe) => + if (!mix.isEmpty) + assert(supertpe.isInstanceOf[TypeRef], + s"Precondition of pickling violated: the supertpe in $tp is not a TypeRef even though $tree has a non-empty mix") + case tp => + assert(false, s"The type of a Super tree must be a SuperType, but $tree has type $tp") + case _ => + tree.tpe match + case tp: SuperType => + assert(false, s"The type of a non-Super tree must not be a SuperType, but $tree has type $tp") + case _ => + override def typed(tree: untpd.Tree, pt: Type = WildcardType)(using Context): Tree = { val tpdTree = super.typed(tree, pt) Typer.assertPositioned(tree) + checkSuper(tpdTree) if (ctx.erasedTypes) // Can't be checked in earlier phases since `checkValue` is only run in // Erasure (because running it in Typer would force too much) @@ -359,7 +429,7 @@ class TreeChecker extends Phase with SymTransformer { override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree = { assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase) - assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.tpe), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") + assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.typeOpt), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") assertDefined(tree) checkNotRepeated(super.typedIdent(tree, pt)) @@ -375,10 +445,12 @@ class TreeChecker extends Phase with SymTransformer { // Polymorphic apply methods stay structural until Erasure val isPolyFunctionApply = (tree.name eq nme.apply) && tree.qualifier.typeOpt.derivesFrom(defn.PolyFunctionClass) + // Erased functions stay structural until Erasure + val isErasedFunctionApply = (tree.name eq nme.apply) 
&& tree.qualifier.typeOpt.derivesFrom(defn.ErasedFunctionClass) // Outer selects are pickled specially so don't require a symbol val isOuterSelect = tree.name.is(OuterSelectName) val isPrimitiveArrayOp = ctx.erasedTypes && nme.isPrimitiveName(tree.name) - if !(tree.isType || isPolyFunctionApply || isOuterSelect || isPrimitiveArrayOp) then + if !(tree.isType || isPolyFunctionApply || isErasedFunctionApply || isOuterSelect || isPrimitiveArrayOp) then val denot = tree.denot assert(denot.exists, i"Selection $tree with type $tpe does not have a denotation") assert(denot.symbol.exists, i"Denotation $denot of selection $tree with type $tpe does not have a symbol, qualifier type = ${tree.qualifier.typeOpt}") @@ -400,11 +472,11 @@ class TreeChecker extends Phase with SymTransformer { sym == mbr || sym.overriddenSymbol(mbr.owner.asClass) == mbr || mbr.overriddenSymbol(sym.owner.asClass) == sym), - ex"""symbols differ for $tree - |was : $sym - |alternatives by type: $memberSyms%, % of types ${memberSyms.map(_.info)}%, % - |qualifier type : ${qualTpe} - |tree type : ${tree.typeOpt} of class ${tree.typeOpt.getClass}""") + i"""symbols differ for $tree + |was : $sym + |alternatives by type: $memberSyms%, % of types ${memberSyms.map(_.info)}%, % + |qualifier type : ${qualTpe} + |tree type : ${tree.typeOpt} of class ${tree.typeOpt.getClass}""") } checkNotRepeated(super.typedSelect(tree, pt)) @@ -441,7 +513,7 @@ class TreeChecker extends Phase with SymTransformer { val inliningPhase = ctx.base.inliningPhase inliningPhase.exists && ctx.phase.id > inliningPhase.id if isAfterInlining then - // The staging phase destroys in PCPCheckAndHeal the property that + // The staging phase destroys in CrossStageSafety the property that // tree.expr.tpe <:< pt1. A test case where this arises is run-macros/enum-nat-macro. // We should follow up why this happens. If the problem is fixed, we can // drop the isAfterInlining special case. 
To reproduce the problem, just @@ -461,6 +533,11 @@ class TreeChecker extends Phase with SymTransformer { i"owner chain = ${tree.symbol.ownersIterator.toList}%, %, ctxOwners = ${ctx.outersIterator.map(_.owner).toList}%, %") } + override def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + assert(sym.info.isInstanceOf[ClassInfo | TypeBounds], i"wrong type, expect a template or type bounds for ${sym.fullName}, but found: ${sym.info}") + super.typedTypeDef(tdef, sym) + } + override def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { val TypeDef(_, impl @ Template(constr, _, _, _)) = cdef: @unchecked assert(cdef.symbol == cls) @@ -583,6 +660,11 @@ class TreeChecker extends Phase with SymTransformer { override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = { val tree1 @ Hole(isTermHole, _, args, content, tpt) = super.typedHole(tree, pt): @unchecked + // Check that we only add the captured type `T` instead of a more complex type like `List[T]`. + // If we have `F[T]` with captured `F` and `T`, we should list `F` and `T` separately in the args. 
+ for arg <- args do + assert(arg.isTerm || arg.tpe.isInstanceOf[TypeRef], "Expected TypeRef in Hole type args but got: " + arg.tpe) + // Check result type of the hole if isTermHole then assert(tpt.typeOpt <:< pt) else assert(tpt.typeOpt =:= pt) @@ -597,7 +679,7 @@ class TreeChecker extends Phase with SymTransformer { defn.AnyType case tpe => tpe defn.QuotedExprClass.typeRef.appliedTo(tpe) - else defn.QuotedTypeClass.typeRef.appliedTo(arg.typeOpt) + else defn.QuotedTypeClass.typeRef.appliedTo(arg.typeOpt.widenTermRefExpr) } val expectedResultType = if isTermHole then defn.QuotedExprClass.typeRef.appliedTo(tpt.typeOpt) @@ -606,7 +688,7 @@ class TreeChecker extends Phase with SymTransformer { defn.FunctionOf(List(defn.QuotesClass.typeRef), expectedResultType, isContextual = true) val expectedContentType = defn.FunctionOf(argQuotedTypes, contextualResult) - assert(content.typeOpt =:= expectedContentType) + assert(content.typeOpt =:= expectedContentType, i"unexpected content of hole\nexpected: ${expectedContentType}\nwas: ${content.typeOpt}") tree1 } @@ -641,68 +723,55 @@ class TreeChecker extends Phase with SymTransformer { override def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = tree } - /** - * Checks that `New` nodes are always wrapped inside `Select` nodes. - */ - def assertSelectWrapsNew(tree: Tree)(using Context): Unit = - (new TreeAccumulator[tpd.Tree] { - override def apply(parent: Tree, tree: Tree)(using Context): Tree = { - tree match { - case tree: New if !parent.isInstanceOf[tpd.Select] => - assert(assertion = false, i"`New` node must be wrapped in a `Select`:\n parent = ${parent.show}\n child = ${tree.show}") - case _: Annotated => - // Don't check inside annotations, since they're allowed to contain - // somewhat invalid trees. 
- case _ => - foldOver(tree, tree) // replace the parent when folding over the children - } - parent // return the old parent so that my siblings see it - } - })(tpd.EmptyTree, tree) -} + /** Tree checker that can be applied to a local tree. */ + class LocalChecker(phasesToCheck: Seq[Phase]) extends Checker(phasesToCheck: Seq[Phase]): + override def assertDefined(tree: untpd.Tree)(using Context): Unit = + // Only check definitions nested in the local tree + if nowDefinedSyms.contains(tree.symbol.maybeOwner) then + super.assertDefined(tree) -object TreeChecker { - /** - Check that TypeParamRefs and MethodParams refer to an enclosing type. - * - Check that all type variables are instantiated. - */ - def checkNoOrphans(tp0: Type, tree: untpd.Tree = untpd.EmptyTree)(using Context): Type = new TypeMap() { - val definedBinders = new java.util.IdentityHashMap[Type, Any] - def apply(tp: Type): Type = { - tp match { - case tp: BindingType => - definedBinders.put(tp, tp) - mapOver(tp) - definedBinders.remove(tp) - case tp: ParamRef => - assert(definedBinders.get(tp.binder) != null, s"orphan param: ${tp.show}, hash of binder = ${System.identityHashCode(tp.binder)}, tree = ${tree.show}, type = $tp0") - case tp: TypeVar => - assert(tp.isInstantiated, s"Uninstantiated type variable: ${tp.show}, tree = ${tree.show}") - apply(tp.underlying) - case _ => - mapOver(tp) - } - tp - } - }.apply(tp0) + def checkMacroGeneratedTree(original: tpd.Tree, expansion: tpd.Tree)(using Context): Unit = + if ctx.settings.XcheckMacros.value then + val checkingCtx = ctx + .fresh + .setReporter(new ThrowingReporter(ctx.reporter)) + val phases = ctx.base.allPhases.toList + val treeChecker = new LocalChecker(previousPhases(phases)) + + try treeChecker.typed(expansion)(using checkingCtx) + catch + case err: java.lang.AssertionError => + val stack = + if !ctx.settings.Ydebug.value then "\nstacktrace available when compiling with `-Ydebug`" + else if err.getStackTrace == null then " no stacktrace" + else 
err.getStackTrace.nn.mkString(" ", " \n", "") + + report.error( + s"""Malformed tree was found while expanding macro with -Xcheck-macros. + |The tree does not conform to the compiler's tree invariants. + | + |Macro was: + |${scala.quoted.runtime.impl.QuotesImpl.showDecompiledTree(original)} + | + |The macro returned: + |${scala.quoted.runtime.impl.QuotesImpl.showDecompiledTree(expansion)} + | + |Error: + |${err.getMessage} + |$stack + |""", + original + ) - /** Run some additional checks on the nodes of the trees. Specifically: - * - * - TypeTree can only appear in TypeApply args, New, Typed tpt, Closure - * tpt, SeqLiteral elemtpt, ValDef tpt, DefDef tpt, and TypeDef rhs. - */ - object TreeNodeChecker extends untpd.TreeTraverser: - import untpd._ - def traverse(tree: Tree)(using Context) = tree match - case t: TypeTree => assert(assertion = false, i"TypeTree not expected: $t") - case t @ TypeApply(fun, _targs) => traverse(fun) - case t @ New(_tpt) => - case t @ Typed(expr, _tpt) => traverse(expr) - case t @ Closure(env, meth, _tpt) => traverse(env); traverse(meth) - case t @ SeqLiteral(elems, _elemtpt) => traverse(elems) - case t @ ValDef(_, _tpt, _) => traverse(t.rhs) - case t @ DefDef(_, paramss, _tpt, _) => for params <- paramss do traverse(params); traverse(t.rhs) - case t @ TypeDef(_, _rhs) => - case t @ Template(constr, parents, self, _) => traverse(constr); traverse(parents); traverse(self); traverse(t.body) - case t => traverseChildren(t) - end traverse + private[TreeChecker] def previousPhases(phases: List[Phase])(using Context): List[Phase] = phases match { + case (phase: MegaPhase) :: phases1 => + val subPhases = phase.miniPhases + val previousSubPhases = previousPhases(subPhases.toList) + if (previousSubPhases.length == subPhases.length) previousSubPhases ::: previousPhases(phases1) + else previousSubPhases + case phase :: phases1 if phase ne ctx.phase => + phase :: previousPhases(phases1) + case _ => + Nil + } } diff --git 
a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala index 6bc2f438eb37..6fba0bca4ce3 100644 --- a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala +++ b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala @@ -145,7 +145,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { val size = tpes.size val n = nTpe.value.intValue if (n < 0 || n >= size) { - report.error("index out of bounds: " + n, nTree.underlyingArgument.srcPos) + report.error(em"index out of bounds: $n", nTree.underlyingArgument.srcPos) tree } else if (size <= MaxTupleArity) @@ -155,7 +155,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { // tup.asInstanceOf[TupleXXL].productElement(n) tup.asInstance(defn.TupleXXLClass.typeRef).select(nme.productElement).appliedTo(Literal(nTpe.value)) case (None, nTpe: ConstantType) if nTpe.value.intValue < 0 => - report.error("index out of bounds: " + nTpe.value.intValue, nTree.srcPos) + report.error(em"index out of bounds: ${nTpe.value.intValue}", nTree.srcPos) tree case _ => // No optimization, keep: diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index fc914e9b03bf..3763af243881 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -31,27 +31,32 @@ object TypeTestsCasts { import typer.Inferencing.maximizeType import typer.ProtoTypes.constrained - /** Whether `(x: X).isInstanceOf[P]` can be checked at runtime? + /** Tests whether `(x: X).isInstanceOf[P]` is uncheckable at runtime, returning the reason, + * or the empty string if it is checkable. * * First do the following substitution: * (a) replace `T @unchecked` and pattern binder types (e.g., `_$1`) in P with WildcardType * * Then check: * - * 1. if `X <:< P`, TRUE - * 2. 
if `P` is a singleton type, TRUE - * 3. if `P` refers to an abstract type member or type parameter, FALSE + * 1. if `X <:< P`, "" + * 2. if `P` is a singleton type, "" + * 3. if `P` refers to an abstract type member or type parameter, "it refers to an abstract type member or type parameter" * 4. if `P = Array[T]`, checkable(E, T) where `E` is the element type of `X`, defaults to `Any`. * 5. if `P` is `pre.F[Ts]` and `pre.F` refers to a class which is not `Array`: * (a) replace `Ts` with fresh type variables `Xs` * (b) constrain `Xs` with `pre.F[Xs] <:< X` - * (c) maximize `pre.F[Xs]` and check `pre.F[Xs] <:< P` + * (c) maximize `pre.F[Xs]` + * (d) if !`pre.F[Xs] <:< P`, "its type arguments can't be determined from $X" * 6. if `P = T1 | T2` or `P = T1 & T2`, checkable(X, T1) && checkable(X, T2). - * 7. if `P` is a refinement type, FALSE - * 8. if `P` is a local class which is not statically reachable from the scope where `X` is defined, FALSE - * 9. otherwise, TRUE + * 7. if `P` is a refinement type, "it's a refinement type" + * 8. if `P` is a local class which is not statically reachable from the scope where `X` is defined, "it's a local class" + * 9. 
otherwise, "" */ - def checkable(X: Type, P: Type, span: Span)(using Context): Boolean = atPhase(Phases.refchecksPhase.next) { + def whyUncheckable(X: Type, P: Type, span: Span)(using Context): String = atPhase(Phases.refchecksPhase.next) { + extension (inline s1: String) inline def &&(inline s2: String): String = if s1 == "" then s2 else s1 + extension (inline b: Boolean) inline def |||(inline s: String): String = if b then "" else s + // Run just before ElimOpaque transform (which follows RefChecks) def isAbstract(P: Type) = !P.dealias.typeSymbol.isClass @@ -124,10 +129,10 @@ object TypeTestsCasts { } - def recur(X: Type, P: Type): Boolean = (X <:< P) || (P.dealias match { - case _: SingletonType => true + def recur(X: Type, P: Type): String = (X <:< P) ||| (P.dealias match { + case _: SingletonType => "" case _: TypeProxy - if isAbstract(P) => false + if isAbstract(P) => i"it refers to an abstract type member or type parameter" case defn.ArrayOf(tpT) => X match { case defn.ArrayOf(tpE) => recur(tpE, tpT) @@ -147,21 +152,23 @@ object TypeTestsCasts { X.classSymbol.exists && P.classSymbol.exists && !X.classSymbol.asClass.mayHaveCommonChild(P.classSymbol.asClass) || typeArgsTrivial(X, tpe) + ||| i"its type arguments can't be determined from $X" } case AndType(tp1, tp2) => recur(X, tp1) && recur(X, tp2) case OrType(tp1, tp2) => recur(X, tp1) && recur(X, tp2) case AnnotatedType(t, _) => recur(X, t) - case tp2: RefinedType => recur(X, tp2.parent) && TypeComparer.hasMatchingMember(tp2.refinedName, X, tp2) + case tp2: RefinedType => recur(X, tp2.parent) + && (TypeComparer.hasMatchingMember(tp2.refinedName, X, tp2) ||| i"it's a refinement type") case tp2: RecType => recur(X, tp2.parent) case _ if P.classSymbol.isLocal && foundClasses(X).exists(P.classSymbol.isInaccessibleChildOf) => // 8 - false - case _ => true + i"it's a local class" + case _ => "" }) - val res = X.widenTermRefExpr.hasAnnotation(defn.UncheckedAnnot) || recur(X.widen, replaceP(P)) + val res = 
recur(X.widen, replaceP(P)) - debug.println(i"checking ${X.show} isInstanceOf ${P} = $res") + debug.println(i"checking $X isInstanceOf $P = $res") res } @@ -234,7 +241,7 @@ object TypeTestsCasts { val foundEffectiveClass = effectiveClass(expr.tpe.widen) if foundEffectiveClass.isPrimitiveValueClass && !testCls.isPrimitiveValueClass then - report.error(i"cannot test if value of $exprType is a reference of $testCls", tree.srcPos) + report.error(em"cannot test if value of $exprType is a reference of $testCls", tree.srcPos) false else foundClasses.exists(check) end checkSensical @@ -338,7 +345,7 @@ object TypeTestsCasts { val testWidened = testType.widen defn.untestableClasses.find(testWidened.isRef(_)) match case Some(untestable) => - report.error(i"$untestable cannot be used in runtime type tests", tree.srcPos) + report.error(em"$untestable cannot be used in runtime type tests", tree.srcPos) constant(expr, Literal(Constant(false))) case _ => val erasedTestType = erasure(testType) @@ -348,9 +355,12 @@ object TypeTestsCasts { if (sym.isTypeTest) { val argType = tree.args.head.tpe val isTrusted = tree.hasAttachment(PatternMatcher.TrustedTypeTestKey) - if (!isTrusted && !checkable(expr.tpe, argType, tree.span)) - report.uncheckedWarning(i"the type test for $argType cannot be checked at runtime", expr.srcPos) - transformTypeTest(expr, tree.args.head.tpe, + val isUnchecked = expr.tpe.widenTermRefExpr.hasAnnotation(defn.UncheckedAnnot) + if !isTrusted && !isUnchecked then + val whyNot = whyUncheckable(expr.tpe, argType, tree.span) + if whyNot.nonEmpty then + report.uncheckedWarning(em"the type test for $argType cannot be checked at runtime because $whyNot", expr.srcPos) + transformTypeTest(expr, argType, flagUnrelated = enclosingInlineds.isEmpty) // if test comes from inlined code, dont't flag it even if it always false } else if (sym.isTypeCast) diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala 
index 5b6e36343379..a897503ef275 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala @@ -76,7 +76,7 @@ object TypeUtils { case AndType(tp1, tp2) => // We assume that we have the following property: // (T1, T2, ..., Tn) & (U1, U2, ..., Un) = (T1 & U1, T2 & U2, ..., Tn & Un) - tp1.tupleElementTypes.zip(tp2.tupleElementTypes).map { case (t1, t2) => t1 & t2 } + tp1.tupleElementTypes.zip(tp2.tupleElementTypes).map { case (t1, t2) => t1.intersect(t2) } case OrType(tp1, tp2) => None // We can't combine the type of two tuples case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala index 56fd4f754d60..95d40102c5a7 100644 --- a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala +++ b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala @@ -53,7 +53,7 @@ class UncacheGivenAliases extends MiniPhase with IdentityDenotTransformer: */ override def transformValDef(tree: ValDef)(using Context): Tree = val sym = tree.symbol - if sym.isAllOf(Given, Lazy) && !needsCache(sym, tree.rhs) then + if sym.isAllOf(LazyGiven) && !needsCache(sym, tree.rhs) then sym.copySymDenotation( initFlags = sym.flags &~ Lazy | Method, info = ExprType(sym.info)) diff --git a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala index a86bf2c48fb5..28d1255eaa72 100644 --- a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala @@ -22,15 +22,14 @@ object ValueClasses { } def isMethodWithExtension(sym: Symbol)(using Context): Boolean = - atPhaseNoLater(extensionMethodsPhase) { - val d = sym.denot - d.validFor.containsPhaseId(ctx.phaseId) && - d.isRealMethod && - isDerivedValueClass(d.owner) && - !d.isConstructor && - !d.symbol.isSuperAccessor && - !d.is(Macro) - } + val d 
= sym.denot.initial + d.validFor.firstPhaseId <= extensionMethodsPhase.id + && d.isRealMethod + && isDerivedValueClass(d.owner) + && !d.isConstructor + && !d.symbol.isSuperAccessor + && !d.isInlineMethod + && !d.is(Macro) /** The member of a derived value class that unboxes it. */ def valueClassUnbox(cls: ClassSymbol)(using Context): Symbol = diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala index ba42d826fe82..8080a7c911b3 100644 --- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala +++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala @@ -61,6 +61,7 @@ class YCheckPositions extends Phase { private def isMacro(call: Tree)(using Context) = call.symbol.is(Macro) || + (call.symbol.isClass && call.tpe.derivesFrom(defn.MacroAnnotationClass)) || // The call of a macro after typer is encoded as a Select while other inlines are Ident // TODO remove this distinction once Inline nodes of expanded macros can be trusted (also in Inliner.inlineCallTrace) (!(ctx.phase <= postTyperPhase) && call.isInstanceOf[Select]) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Cache.scala b/compiler/src/dotty/tools/dotc/transform/init/Cache.scala new file mode 100644 index 000000000000..054db090c3bc --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Cache.scala @@ -0,0 +1,201 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* + +import ast.tpd +import tpd.Tree + +/** The co-inductive cache used for analysis + * + * The cache contains two maps from `(Config, Tree)` to `Res`: + * + * - input cache (`this.last`) + * - output cache (`this.current`) + * + * The two caches are required because we want to make sure in a new iteration, + * an expression is evaluated exactly once. 
The monotonicity of the analysis + * ensures that the cache state goes up the lattice of the abstract domain, + * consequently the algorithm terminates. + * + * The general skeleton for usage of the cache is as follows + * + * def analysis(entryExp: Expr) = { + * def iterate(entryExp: Expr)(using Cache) = + * eval(entryExp, initConfig) + * if cache.hasChanged && noErrors then + * cache.last = cache.current + * cache.current = Empty + * cache.changed = false + * iterate(entryExp) + * else + * reportErrors + * + * + * def eval(expr: Expr, config: Config)(using Cache) = + * cache.cachedEval(config, expr) { + * // Actual recursive evaluation of expression. + * // + * // Only executed if the entry `(exp, config)` is not in the output cache. + * } + * + * iterate(entryExp)(using new Cache) + * } + * + * See the documentation for the method `Cache.cachedEval` for more information. + * + * What goes to the configuration (`Config`) and what goes to the result (`Res`) + * need to be decided by the specific analysis and justified by reasoning about + * soundness. + * + * @param Config The analysis state that matters for evaluating an expression. + * @param Res The result from the evaluation the given expression. + */ +class Cache[Config, Res]: + import Cache.* + + /** The cache for expression values from last iteration */ + protected var last: ExprValueCache[Config, Res] = Map.empty + + /** The output cache for expression values + * + * The output cache is computed based on the cache values `last` from the + * last iteration. + * + * Both `last` and `current` are required to make sure an encountered + * expression is evaluated once in each iteration. + */ + protected var current: ExprValueCache[Config, Res] = Map.empty + + /** Whether the current heap is different from the last heap? + * + * `changed == false` implies that the fixed point has been reached. 
+ */ + protected var changed: Boolean = false + + /** Whether any value in the output cache (this.current) was accessed + * after being added. If no cached values are used after they are added + * for the first time then another iteration of analysis is not needed. + */ + protected var cacheUsed: Boolean = false + + /** Used to avoid allocation, its state does not matter */ + protected given MutableTreeWrapper = new MutableTreeWrapper + + def get(config: Config, expr: Tree): Option[Res] = + val res = current.get(config, expr) + cacheUsed = cacheUsed || res.nonEmpty + res + + /** Evaluate an expression with cache + * + * The algorithmic skeleton is as follows: + * + * if don't cache result then + * return eval(expr) + * if this.current.contains(config, expr) then + * return cached value + * else + * val assumed = this.last(config, expr) or bottom value if absent + * this.current(config, expr) = assumed + * val actual = eval(expr) + * + * if assumed != actual then + * this.changed = true + * this.current(config, expr) = actual + * + */ + def cachedEval(config: Config, expr: Tree, cacheResult: Boolean, default: Res)(eval: Tree => Res): Res = + if !cacheResult then + eval(expr) + else + this.get(config, expr) match + case Some(value) => value + case None => + val assumeValue: Res = + this.last.get(config, expr) match + case Some(value) => value + case None => + this.last = this.last.updatedNested(config, expr, default) + default + + this.current = this.current.updatedNested(config, expr, assumeValue) + + val actual = eval(expr) + if actual != assumeValue then + // println("Changed! from = " + assumeValue + ", to = " + actual) + this.changed = true + this.current = this.current.updatedNested(config, expr, actual) + // this.current = this.current.removed(config, expr) + end if + + actual + end if + end cachedEval + + def hasChanged = changed + + def isUsed = cacheUsed + + /** Prepare cache for the next iteration + * + * 1. Reset changed flag. + * + * 2. 
Use current cache as last cache and set current cache to be empty. + */ + def prepareForNextIteration()(using Context) = + this.changed = false + this.cacheUsed = false + this.last = this.current + this.current = Map.empty +end Cache + +object Cache: + type ExprValueCache[Config, Res] = Map[Config, Map[TreeWrapper, Res]] + + /** A wrapper for trees for storage in maps based on referential equality of trees. */ + abstract class TreeWrapper: + def tree: Tree + + override final def equals(other: Any): Boolean = + other match + case that: TreeWrapper => this.tree eq that.tree + case _ => false + + override final def hashCode = tree.hashCode + + /** The immutable wrapper is intended to be stored as key in the heap. */ + class ImmutableTreeWrapper(val tree: Tree) extends TreeWrapper + + /** For queries on the heap, reuse the same wrapper to avoid unnecessary allocation. + * + * A `MutableTreeWrapper` is only ever used temporarily for querying a map, + * and is never inserted to the map. + */ + class MutableTreeWrapper extends TreeWrapper: + var queryTree: Tree | Null = null + def tree: Tree = queryTree match + case tree: Tree => tree + case null => ??? 
+ + extension [Config, Res](cache: ExprValueCache[Config, Res]) + def get(config: Config, expr: Tree)(using queryWrapper: MutableTreeWrapper): Option[Res] = + queryWrapper.queryTree = expr + cache.get(config).flatMap(_.get(queryWrapper)) + + def removed(config: Config, expr: Tree)(using queryWrapper: MutableTreeWrapper) = + queryWrapper.queryTree = expr + val innerMap2 = cache(config).removed(queryWrapper) + cache.updated(config, innerMap2) + + def updatedNested(config: Config, expr: Tree, result: Res): ExprValueCache[Config, Res] = + val wrapper = new ImmutableTreeWrapper(expr) + updatedNestedWrapper(config, wrapper, result) + + def updatedNestedWrapper(config: Config, wrapper: ImmutableTreeWrapper, result: Res): ExprValueCache[Config, Res] = + val innerMap = cache.getOrElse(config, Map.empty[TreeWrapper, Res]) + val innerMap2 = innerMap.updated(wrapper, result) + cache.updated(config, innerMap2) + end extension diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index a8ade3acae71..1efb3c88149e 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -15,10 +15,11 @@ import StdNames._ import dotty.tools.dotc.transform._ import Phases._ +import scala.collection.mutable import Semantic._ -class Checker extends Phase { +class Checker extends Phase: override def phaseName: String = Checker.name @@ -31,17 +32,23 @@ class Checker extends Phase { override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = val checkCtx = ctx.fresh.setPhase(this.start) - Semantic.checkTasks(using checkCtx) { - val traverser = new InitTreeTraverser() - units.foreach { unit => traverser.traverse(unit.tpdTree) } - } + val traverser = new InitTreeTraverser() + units.foreach { unit => traverser.traverse(unit.tpdTree) } + val classes = traverser.getClasses() + + Semantic.checkClasses(classes)(using 
checkCtx) + units - def run(using Context): Unit = { + def run(using Context): Unit = // ignore, we already called `Semantic.check()` in `runOn` - } + () + + class InitTreeTraverser extends TreeTraverser: + private val classes: mutable.ArrayBuffer[ClassSymbol] = new mutable.ArrayBuffer + + def getClasses(): List[ClassSymbol] = classes.toList - class InitTreeTraverser(using WorkList) extends TreeTraverser { override def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree) tree match { @@ -53,29 +60,12 @@ class Checker extends Phase { mdef match case tdef: TypeDef if tdef.isClassDef => val cls = tdef.symbol.asClass - val thisRef = ThisRef(cls) - if shouldCheckClass(cls) then Semantic.addTask(thisRef) + classes.append(cls) case _ => case _ => } - } - - private def shouldCheckClass(cls: ClassSymbol)(using Context) = { - val instantiable: Boolean = - cls.is(Flags.Module) || - !cls.isOneOf(Flags.AbstractOrTrait) && { - // see `Checking.checkInstantiable` in typer - val tp = cls.appliedRef - val stp = SkolemType(tp) - val selfType = cls.givenSelfType.asSeenFrom(stp, cls) - !selfType.exists || stp <:< selfType - } - - // A concrete class may not be instantiated if the self type is not satisfied - instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass - } -} + end InitTreeTraverser object Checker: val name: String = "initChecker" diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala index 7d92d2b2a921..366fd6be96a2 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala @@ -5,109 +5,64 @@ package init import ast.tpd._ import core._ -import util.SourcePosition import util.Property -import Decorators._, printing.SyntaxHighlighting +import util.SourcePosition import Types._, Symbols._, Contexts._ -import scala.collection.mutable +import Trace.Trace object Errors: private val 
IsFromPromotion = new Property.Key[Boolean] sealed trait Error: - def trace: Seq[Tree] + def trace: Trace def show(using Context): String - def pos(using Context): SourcePosition = trace.last.sourcePos + def pos(using Context): SourcePosition = Trace.position(using trace).sourcePos def stacktrace(using Context): String = val preamble: String = if ctx.property(IsFromPromotion).nonEmpty then " Promotion trace:\n" else " Calling trace:\n" - buildStacktrace(trace, preamble) + Trace.buildStacktrace(trace, preamble) def issue(using Context): Unit = report.warning(show, this.pos) end Error - def buildStacktrace(trace: Seq[Tree], preamble: String)(using Context): String = if trace.isEmpty then "" else preamble + { - var lastLineNum = -1 - var lines: mutable.ArrayBuffer[String] = new mutable.ArrayBuffer - trace.foreach { tree => - val pos = tree.sourcePos - val prefix = "-> " - val line = - if pos.source.exists then - val loc = "[ " + pos.source.file.name + ":" + (pos.line + 1) + " ]" - val code = SyntaxHighlighting.highlight(pos.lineContent.trim.nn) - i"$code\t$loc" - else - tree.show - val positionMarkerLine = - if pos.exists && pos.source.exists then - positionMarker(pos) - else "" - - // always use the more precise trace location - if lastLineNum == pos.line then - lines.dropRightInPlace(1) - - lines += (prefix + line + "\n" + positionMarkerLine) - - lastLineNum = pos.line - } - val sb = new StringBuilder - for line <- lines do sb.append(line) - sb.toString - } - - /** Used to underline source positions in the stack trace - * pos.source must exist - */ - private def positionMarker(pos: SourcePosition): String = - val trimmed = pos.lineContent.takeWhile(c => c.isWhitespace).length - val padding = pos.startColumnPadding.substring(trimmed).nn + " " - val carets = - if (pos.startLine == pos.endLine) - "^" * math.max(1, pos.endColumn - pos.startColumn) - else "^" - - s"$padding$carets\n" - override def toString() = this.getClass.getName.nn /** Access non-initialized field */ 
- case class AccessNonInit(field: Symbol)(val trace: Seq[Tree]) extends Error: - def source: Tree = trace.last + case class AccessNonInit(field: Symbol)(val trace: Trace) extends Error: + def source: Tree = Trace.position(using trace) def show(using Context): String = "Access non-initialized " + field.show + "." + stacktrace override def pos(using Context): SourcePosition = field.sourcePos /** Promote a value under initialization to fully-initialized */ - case class PromoteError(msg: String)(val trace: Seq[Tree]) extends Error: + case class PromoteError(msg: String)(val trace: Trace) extends Error: def show(using Context): String = msg + stacktrace - case class AccessCold(field: Symbol)(val trace: Seq[Tree]) extends Error: + case class AccessCold(field: Symbol)(val trace: Trace) extends Error: def show(using Context): String = - "Access field " + field.show + " on a cold object." + stacktrace + "Access field " + field.show + " on an uninitialized (Cold) object." + stacktrace - case class CallCold(meth: Symbol)(val trace: Seq[Tree]) extends Error: + case class CallCold(meth: Symbol)(val trace: Trace) extends Error: def show(using Context): String = - "Call method " + meth.show + " on a cold object." + stacktrace + "Call method " + meth.show + " on an uninitialized (Cold) object." + stacktrace - case class CallUnknown(meth: Symbol)(val trace: Seq[Tree]) extends Error: + case class CallUnknown(meth: Symbol)(val trace: Trace) extends Error: def show(using Context): String = val prefix = if meth.is(Flags.Method) then "Calling the external method " else "Accessing the external field" prefix + meth.show + " may cause initialization errors." 
+ stacktrace /** Promote a value under initialization to fully-initialized */ - case class UnsafePromotion(msg: String, error: Error)(val trace: Seq[Tree]) extends Error: + case class UnsafePromotion(msg: String, error: Error)(val trace: Trace) extends Error: def show(using Context): String = msg + stacktrace + "\n" + - "Promoting the value to hot (transitively initialized) failed due to the following problem:\n" + { + "Promoting the value to transitively initialized (Hot) failed due to the following problem:\n" + { val ctx2 = ctx.withProperty(IsFromPromotion, Some(true)) error.show(using ctx2) } @@ -116,7 +71,7 @@ object Errors: * * Invariant: argsIndices.nonEmpty */ - case class UnsafeLeaking(error: Error, nonHotOuterClass: Symbol, argsIndices: List[Int])(val trace: Seq[Tree]) extends Error: + case class UnsafeLeaking(error: Error, nonHotOuterClass: Symbol, argsIndices: List[Int])(val trace: Trace) extends Error: def show(using Context): String = "Problematic object instantiation: " + argumentInfo() + stacktrace + "\n" + "It leads to the following error during object initialization:\n" + @@ -141,5 +96,5 @@ object Errors: acc + text2 } val verb = if multiple then " are " else " is " - val adjective = "not hot (transitively initialized)." + val adjective = "not transitively initialized (Hot)." subject + verb + adjective diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index e4f6832f7695..4548dccb598f 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -15,10 +15,19 @@ import config.Printers.init as printer import reporting.trace as log import Errors.* +import Trace.* +import Util.* +import Cache.* import scala.collection.mutable import scala.annotation.tailrec +/** + * Checks safe initialization of objects + * + * This algorithm cannot handle safe access of global object names. 
That part + * is handled by the check in `Objects` (@see Objects). + */ object Semantic: // ----- Domain definitions -------------------------------- @@ -55,16 +64,18 @@ object Semantic: sealed abstract class Value: def show(using Context): String = this match case ThisRef(klass) => - "ThisRef[" + klass.show + "]" + "the original object of type (" + klass.show + ") where initialization checking started" case Warm(klass, outer, ctor, args) => val argsText = if args.nonEmpty then ", args = " + args.map(_.show).mkString("(", ", ", ")") else "" - "Warm[" + klass.show + "] { outer = " + outer.show + argsText + " }" + "a non-transitively initialized (Warm) object of type (" + klass.show + ") { outer = " + outer.show + argsText + " }" case Fun(expr, thisV, klass) => - "Fun { this = " + thisV.show + ", owner = " + klass.show + " }" + "a function where \"this\" is (" + thisV.show + ")" case RefSet(values) => values.map(_.show).mkString("Set { ", ", ", " }") - case _ => - this.toString() + case Hot => + "a transitively initialized (Hot) object" + case Cold => + "an uninitialized (Cold) object" def isHot = this == Hot def isCold = this == Cold @@ -117,7 +128,7 @@ object Semantic: assert(!populatingParams, "the object is already populating parameters") populatingParams = true val tpl = klass.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] - extendTrace(klass.defTree) { this.callConstructor(ctor, args.map(arg => ArgInfo(arg, trace))) } + extendTrace(klass.defTree) { this.callConstructor(ctor, args.map(arg => new ArgInfo(arg, trace))) } populatingParams = false this } @@ -207,7 +218,7 @@ object Semantic: object Cache: /** Cache for expressions * - * Ref -> Tree -> Value + * Value -> Tree -> Value * * The first key is the value of `this` for the expression. * @@ -233,66 +244,27 @@ object Semantic: * that could be reused to check other classes. We employ this trick to * improve performance of the analysis. 
*/ - private type ExprValueCache = Map[Value, Map[TreeWrapper, Value]] /** The heap for abstract objects * - * The heap objects are immutable. - */ - private type Heap = Map[Ref, Objekt] - - /** A wrapper for trees for storage in maps based on referential equality of trees. */ - private abstract class TreeWrapper: - def tree: Tree - - override final def equals(other: Any): Boolean = - other match - case that: TreeWrapper => this.tree eq that.tree - case _ => false - - override final def hashCode = tree.hashCode - - /** The immutable wrapper is intended to be stored as key in the heap. */ - private class ImmutableTreeWrapper(val tree: Tree) extends TreeWrapper - - /** For queries on the heap, reuse the same wrapper to avoid unnecessary allocation. + * The heap objects are immutable and its values are essentially derived + * from the cache, thus they are not part of the configuration. * - * A `MutableTreeWrapper` is only ever used temporarily for querying a map, - * and is never inserted to the map. + * The only exception is the object correspond to `ThisRef`, where the + * object remembers the set of initialized fields. That information is reset + * in each iteration thus is harmless. */ - private class MutableTreeWrapper extends TreeWrapper: - var queryTree: Tree | Null = null - def tree: Tree = queryTree match - case tree: Tree => tree - case null => ??? - - class Cache: - /** The cache for expression values from last iteration */ - private var last: ExprValueCache = Map.empty + private type Heap = Map[Ref, Objekt] - /** The output cache for expression values - * - * The output cache is computed based on the cache values `last` from the - * last iteration. - * - * Both `last` and `current` are required to make sure an encountered - * expression is evaluated once in each iteration. 
- */ - private var current: ExprValueCache = Map.empty + class Data extends Cache[Value, Value]: /** Global cached values for expressions * * The values are only added when a fixed point is reached. * * It is intended to improve performance for computation related to warm values. */ - private var stable: ExprValueCache = Map.empty - - /** Whether the current heap is different from the last heap? - * - * `changed == false` implies that the fixed point has been reached. - */ - private var changed: Boolean = false + private var stable: ExprValueCache[Value, Value] = Map.empty /** Abstract heap stores abstract objects * @@ -320,77 +292,38 @@ object Semantic: /** Used to revert heap to last stable heap. */ private var heapStable: Heap = Map.empty - /** Used to avoid allocation, its state does not matter */ - private given MutableTreeWrapper = new MutableTreeWrapper - - def hasChanged = changed - - def get(value: Value, expr: Tree): Option[Value] = - current.get(value, expr) match - case None => stable.get(value, expr) + override def get(value: Value, expr: Tree): Option[Value] = + stable.get(value, expr) match + case None => super.get(value, expr) case res => res /** Backup the state of the cache * * All the shared data structures must be immutable. 
*/ - def backup(): Cache = - val cache = new Cache - cache.last = this.last - cache.current = this.current + def backup(): Data = + val cache = new Data cache.stable = this.stable cache.heap = this.heap cache.heapStable = this.heapStable cache.changed = this.changed + cache.last = this.last + cache.current = this.current cache /** Restore state from a backup */ - def restore(cache: Cache) = + def restore(cache: Data) = + this.changed = cache.changed this.last = cache.last this.current = cache.current this.stable = cache.stable this.heap = cache.heap this.heapStable = cache.heapStable - this.changed = cache.changed - - /** Copy the value of `(value, expr)` from the last cache to the current cache - * - * It assumes the value is `Hot` if it doesn't exist in the last cache. - * - * It updates the current caches if the values change. - * - * The two caches are required because we want to make sure in a new iteration, an expression is evaluated once. - */ - def assume(value: Value, expr: Tree, cacheResult: Boolean)(fun: => Value): Contextual[Value] = - val assumeValue: Value = - last.get(value, expr) match - case Some(value) => value - case None => - this.last = last.updatedNested(value, expr, Hot) - Hot - - this.current = current.updatedNested(value, expr, assumeValue) - - val actual = fun - if actual != assumeValue then - this.changed = true - this.current = this.current.updatedNested(value, expr, actual) - else - // It's tempting to cache the value in stable, but it's unsound. - // The reason is that the current value may depend on other values - // which might change. - // - // stable.put(value, expr, actual) - () - end if - - actual - end assume /** Commit current cache to stable cache. */ private def commitToStableCache() = for - (v, m) <- current + (v, m) <- this.current if v.isWarm // It's useless to cache value for ThisRef. (wrapper, res) <- m do @@ -404,10 +337,8 @@ object Semantic: * * 3. Revert heap to stable. 
*/ - def prepareForNextIteration()(using Context) = - this.changed = false - this.last = this.current - this.current = Map.empty + override def prepareForNextIteration()(using Context) = + super.prepareForNextIteration() this.heap = this.heapStable /** Prepare for checking next class @@ -421,15 +352,15 @@ object Semantic: * 4. Reset last cache. */ def prepareForNextClass()(using Context) = - if this.changed then - this.changed = false + if this.hasChanged then this.heap = this.heapStable else this.commitToStableCache() this.heapStable = this.heap - this.last = Map.empty - this.current = Map.empty + // reset changed and cache + super.prepareForNextIteration() + def updateObject(ref: Ref, obj: Objekt) = assert(!this.heapStable.contains(ref)) @@ -438,59 +369,19 @@ object Semantic: def containsObject(ref: Ref) = heap.contains(ref) def getObject(ref: Ref) = heap(ref) - end Cache - - extension (cache: ExprValueCache) - private def get(value: Value, expr: Tree)(using queryWrapper: MutableTreeWrapper): Option[Value] = - queryWrapper.queryTree = expr - cache.get(value).flatMap(_.get(queryWrapper)) - - private def removed(value: Value, expr: Tree)(using queryWrapper: MutableTreeWrapper) = - queryWrapper.queryTree = expr - val innerMap2 = cache(value).removed(queryWrapper) - cache.updated(value, innerMap2) - - private def updatedNested(value: Value, expr: Tree, result: Value): ExprValueCache = - val wrapper = new ImmutableTreeWrapper(expr) - updatedNestedWrapper(value, wrapper, result) - - private def updatedNestedWrapper(value: Value, wrapper: ImmutableTreeWrapper, result: Value): ExprValueCache = - val innerMap = cache.getOrElse(value, Map.empty[TreeWrapper, Value]) - val innerMap2 = innerMap.updated(wrapper, result) - cache.updated(value, innerMap2) - end extension - end Cache + end Data - import Cache.* + end Cache - inline def cache(using c: Cache): Cache = c + inline def cache(using c: Cache.Data): Cache.Data = c // ----- Checker State 
----------------------------------- /** The state that threads through the interpreter */ - type Contextual[T] = (Context, Trace, Promoted, Cache, Reporter) ?=> T + type Contextual[T] = (Context, Trace, Promoted, Cache.Data, Reporter) ?=> T // ----- Error Handling ----------------------------------- - object Trace: - opaque type Trace = Vector[Tree] - - val empty: Trace = Vector.empty - - extension (trace: Trace) - def add(node: Tree): Trace = trace :+ node - def toVector: Vector[Tree] = trace - - def show(using trace: Trace, ctx: Context): String = buildStacktrace(trace, "\n") - - def position(using trace: Trace): Tree = trace.last - type Trace = Trace.Trace - - import Trace.* - def trace(using t: Trace): Trace = t - inline def withTrace[T](t: Trace)(op: Trace ?=> T): T = op(using t) - inline def extendTrace[T](node: Tree)(using t: Trace)(op: Trace ?=> T): T = op(using t.add(node)) - /** Error reporting */ trait Reporter: def report(err: Error): Unit @@ -508,7 +399,7 @@ object Semantic: /** * Revert the cache to previous state. */ - def abort()(using Cache): Unit + def abort()(using Cache.Data): Unit def errors: List[Error] object Reporter: @@ -517,8 +408,8 @@ object Semantic: def errors = buf.toList def report(err: Error) = buf += err - class TryBufferedReporter(backup: Cache) extends BufferedReporter with TryReporter: - def abort()(using Cache): Unit = cache.restore(backup) + class TryBufferedReporter(backup: Cache.Data) extends BufferedReporter with TryReporter: + def abort()(using Cache.Data): Unit = cache.restore(backup) class ErrorFound(val error: Error) extends Exception class StopEarlyReporter extends Reporter: @@ -529,7 +420,7 @@ object Semantic: * The TryReporter cannot be thrown away: either `abort` must be called or * the errors must be reported. 
*/ - def errorsIn(fn: Reporter ?=> Unit)(using Cache): TryReporter = + def errorsIn(fn: Reporter ?=> Unit)(using Cache.Data): TryReporter = val reporter = new TryBufferedReporter(cache.backup()) fn(using reporter) reporter @@ -544,7 +435,7 @@ object Semantic: catch case ex: ErrorFound => ex.error :: Nil - def hasErrors(fn: Reporter ?=> Unit)(using Cache): Boolean = + def hasErrors(fn: Reporter ?=> Unit)(using Cache.Data): Boolean = val backup = cache.backup() val errors = stopEarly(fn) cache.restore(backup) @@ -581,7 +472,7 @@ object Semantic: def widenArg: Contextual[Value] = a match case _: Ref | _: Fun => - val hasError = Reporter.hasErrors { a.promote("Argument cannot be promoted to hot") } + val hasError = Reporter.hasErrors { a.promote("Argument is not provably transitively initialized (Hot)") } if hasError then Cold else Hot case RefSet(refs) => @@ -606,14 +497,14 @@ object Semantic: case _ => cache.getObject(ref) - def ensureObjectExists()(using Cache): ref.type = + def ensureObjectExists()(using Cache.Data): ref.type = if cache.containsObject(ref) then printer.println("object " + ref + " already exists") ref else ensureFresh() - def ensureFresh()(using Cache): ref.type = + def ensureFresh()(using Cache.Data): ref.type = val obj = Objekt(ref.klass, fields = Map.empty, outers = Map(ref.klass -> ref.outer)) printer.println("reset object " + ref) cache.updateObject(ref, obj) @@ -664,7 +555,7 @@ object Semantic: Hot case Cold => - val error = AccessCold(field)(trace.toVector) + val error = AccessCold(field)(trace) reporter.report(error) Hot @@ -689,11 +580,11 @@ object Semantic: val rhs = target.defTree.asInstanceOf[ValOrDefDef].rhs eval(rhs, ref, target.owner.asClass, cacheResult = true) else - val error = CallUnknown(field)(trace.toVector) + val error = CallUnknown(field)(trace) reporter.report(error) Hot else - val error = AccessNonInit(target)(trace.toVector) + val error = AccessNonInit(target)(trace) reporter.report(error) Hot else @@ -779,7 +670,7 @@ 
object Semantic: case Cold => promoteArgs() - val error = CallCold(meth)(trace.toVector) + val error = CallCold(meth)(trace) reporter.report(error) Hot @@ -818,9 +709,11 @@ object Semantic: // no source code available promoteArgs() // try promoting the receiver as last resort - val hasErrors = Reporter.hasErrors { ref.promote("try promote value to hot") } + val hasErrors = Reporter.hasErrors { + ref.promote(ref.show + " has no source code and is not provably transitively initialized (Hot).") + } if hasErrors then - val error = CallUnknown(target)(trace.toVector) + val error = CallUnknown(target)(trace) reporter.report(error) Hot else if target.exists then @@ -855,7 +748,7 @@ object Semantic: // init "fake" param fields for parameters of primary and secondary constructors def addParamsAsFields(args: List[Value], ref: Ref, ctorDef: DefDef) = val params = ctorDef.termParamss.flatten.map(_.symbol) - assert(args.size == params.size, "arguments = " + args.size + ", params = " + params.size) + assert(args.size == params.size, "arguments = " + args.size + ", params = " + params.size + ", ctor = " + ctor.show) for (param, value) <- params.zip(args) do ref.updateField(param, value) printer.println(param.show + " initialized with " + value) @@ -899,7 +792,7 @@ object Semantic: Hot else // no source code available - val error = CallUnknown(ctor)(trace.toVector) + val error = CallUnknown(ctor)(trace) reporter.report(error) Hot } @@ -922,7 +815,7 @@ object Semantic: yield i + 1 - val error = UnsafeLeaking(errors.head, nonHotOuterClass, indices)(trace.toVector) + val error = UnsafeLeaking(errors.head, nonHotOuterClass, indices)(trace) reporter.report(error) Hot else @@ -947,7 +840,7 @@ object Semantic: tryLeak(warm, NoSymbol, args2) case Cold => - val error = CallCold(ctor)(trace.toVector) + val error = CallCold(ctor)(trace) reporter.report(error) Hot @@ -1004,7 +897,7 @@ object Semantic: case Cold => Cold - case ref: Ref => eval(vdef.rhs, ref, enclosingClass) + case ref: Ref => 
eval(vdef.rhs, ref, enclosingClass, cacheResult = sym.is(Flags.Lazy)) case _ => report.error("[Internal error] unexpected this value when accessing local variable, sym = " + sym.show + ", thisValue = " + thisValue2.show + Trace.show, Trace.position) @@ -1078,7 +971,7 @@ object Semantic: case Hot => case Cold => - reporter.report(PromoteError(msg)(trace.toVector)) + reporter.report(PromoteError(msg)(trace)) case thisRef: ThisRef => val emptyFields = thisRef.nonInitFields() @@ -1086,7 +979,7 @@ object Semantic: promoted.promoteCurrent(thisRef) else val fields = "Non initialized field(s): " + emptyFields.map(_.show).mkString(", ") + "." - reporter.report(PromoteError(msg + "\n" + fields)(trace.toVector)) + reporter.report(PromoteError(msg + "\n" + fields)(trace)) case warm: Warm => if !promoted.contains(warm) then @@ -1100,13 +993,13 @@ object Semantic: val errors = Reporter.stopEarly { val res = { given Trace = Trace.empty - eval(body, thisV, klass) + eval(body, thisV, klass, cacheResult = true) } given Trace = Trace.empty.add(body) - res.promote("The function return value is not hot. Found = " + res.show + ".") + res.promote("Only transitively initialized (Hot) values can be returned by functions. The function " + fun.show + " returns " + res.show + ".") } if errors.nonEmpty then - reporter.report(UnsafePromotion(msg, errors.head)(trace.toVector)) + reporter.report(UnsafePromotion(msg, errors.head)(trace)) else promoted.add(fun) @@ -1147,7 +1040,7 @@ object Semantic: // // This invariant holds because of the Scala/Java/JVM restriction that we cannot use `this` in super constructor calls. 
if subClassSegmentHot && !isHotSegment then - report.error("[Internal error] Expect current segment to hot in promotion, current klass = " + klass.show + + report.error("[Internal error] Expect current segment to be transitively initialized (Hot) in promotion, current klass = " + klass.show + ", subclass = " + subClass.show + Trace.show, Trace.position) // If the outer and parameters of a class are all hot, then accessing fields and methods of the current @@ -1156,20 +1049,20 @@ object Semantic: if !isHotSegment then for member <- klass.info.decls do if member.isClass then - val error = PromoteError("Promotion cancelled as the value contains inner " + member.show + ".")(Vector.empty) + val error = PromoteError("Promotion cancelled as the value contains inner " + member.show + ".")(Trace.empty) reporter.report(error) else if !member.isType && !member.isConstructor && !member.is(Flags.Deferred) then given Trace = Trace.empty if member.is(Flags.Method, butNot = Flags.Accessor) then - val args = member.info.paramInfoss.flatten.map(_ => ArgInfo(Hot, Trace.empty)) + val args = member.info.paramInfoss.flatten.map(_ => new ArgInfo(Hot: Value, Trace.empty)) val res = warm.call(member, args, receiver = warm.klass.typeRef, superType = NoType) withTrace(trace.add(member.defTree)) { - res.promote("Cannot prove that the return value of " + member.show + " is hot. Found = " + res.show + ".") + res.promote("Could not verify that the return value of " + member.show + " is transitively initialized (Hot). It was found to be " + res.show + ".") } else val res = warm.select(member, receiver = warm.klass.typeRef) withTrace(trace.add(member.defTree)) { - res.promote("Cannot prove that the field " + member.show + " is hot. Found = " + res.show + ".") + res.promote("Could not verify that the field " + member.show + " is transitively initialized (Hot). 
It was found to be " + res.show + ".") } end for @@ -1189,7 +1082,7 @@ object Semantic: } if errors.isEmpty then Nil - else UnsafePromotion(msg, errors.head)(trace.toVector) :: Nil + else UnsafePromotion(msg, errors.head)(trace) :: Nil } end extension @@ -1206,79 +1099,56 @@ object Semantic: cls == defn.AnyValClass || cls == defn.ObjectClass -// ----- Work list --------------------------------------------------- - case class Task(value: ThisRef) - - class WorkList private[Semantic](): - private val pendingTasks: mutable.ArrayBuffer[Task] = new mutable.ArrayBuffer - - def addTask(task: Task): Unit = - if !pendingTasks.contains(task) then pendingTasks.append(task) - - /** Process the worklist until done */ - final def work()(using Cache, Context): Unit = - for task <- pendingTasks - do doTask(task) - - /** Check an individual class - * - * This method should only be called from the work list scheduler. - */ - private def doTask(task: Task)(using Cache, Context): Unit = - val thisRef = task.value - val tpl = thisRef.klass.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] - - @tailrec - def iterate(): Unit = { - given Promoted = Promoted.empty(thisRef.klass) - given Trace = Trace.empty.add(thisRef.klass.defTree) - given reporter: Reporter.BufferedReporter = new Reporter.BufferedReporter +// ----- API -------------------------------- - thisRef.ensureFresh() + /** Check an individual class + * + * The class to be checked must be an instantiable concrete class. 
+ */ + private def checkClass(classSym: ClassSymbol)(using Cache.Data, Context): Unit = + val thisRef = ThisRef(classSym) + val tpl = classSym.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] - // set up constructor parameters - for param <- tpl.constr.termParamss.flatten do - thisRef.updateField(param.symbol, Hot) + @tailrec + def iterate(): Unit = { + given Promoted = Promoted.empty(classSym) + given Trace = Trace.empty.add(classSym.defTree) + given reporter: Reporter.BufferedReporter = new Reporter.BufferedReporter - log("checking " + task) { eval(tpl, thisRef, thisRef.klass) } - reporter.errors.foreach(_.issue) + thisRef.ensureFresh() - if cache.hasChanged && reporter.errors.isEmpty then - // code to prepare cache and heap for next iteration - cache.prepareForNextIteration() - iterate() - else - cache.prepareForNextClass() - } + // set up constructor parameters + for param <- tpl.constr.termParamss.flatten do + thisRef.updateField(param.symbol, Hot) - iterate() - end doTask - end WorkList - inline def workList(using wl: WorkList): WorkList = wl + log("checking " + classSym) { eval(tpl, thisRef, classSym) } + reporter.errors.foreach(_.issue) -// ----- API -------------------------------- + if cache.hasChanged && reporter.errors.isEmpty && cache.isUsed then + // code to prepare cache and heap for next iteration + cache.prepareForNextIteration() + iterate() + else + cache.prepareForNextClass() + } - /** Add a checking task to the work list */ - def addTask(thisRef: ThisRef)(using WorkList) = workList.addTask(Task(thisRef)) + iterate() + end checkClass - /** Check the specified tasks - * - * Semantic.checkTasks { - * Semantic.addTask(...) 
- * } + /** + * Check the specified concrete classes */ - def checkTasks(using Context)(taskBuilder: WorkList ?=> Unit): Unit = - val workList = new WorkList - val cache = new Cache - taskBuilder(using workList) - workList.work()(using cache, ctx) + def checkClasses(classes: List[ClassSymbol])(using Context): Unit = + given Cache.Data() + for classSym <- classes if isConcreteClass(classSym) do + checkClass(classSym) // ----- Semantic definition -------------------------------- + type ArgInfo = TraceValue[Value] - /** Utility definition used for better error-reporting of argument errors */ - case class ArgInfo(value: Value, trace: Trace): - def promote: Contextual[Unit] = withTrace(trace) { - value.promote("Cannot prove the method argument is hot. Only hot values are safe to leak.\nFound = " + value.show + ".") + extension (arg: ArgInfo) + def promote: Contextual[Unit] = withTrace(arg.trace) { + arg.value.promote("Could not verify that the method argument is transitively initialized (Hot). It was found to be " + arg.value.show + ". Only transitively initialized arguments may be passed to methods (except constructors).") } /** Evaluate an expression with the given value for `this` in a given class `klass` @@ -1296,13 +1166,13 @@ object Semantic: * * This method only handles cache logic and delegates the work to `cases`. * - * The parameter `cacheResult` is used to reduce the size of the cache. + * @param expr The expression to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the expression is located. + * @param cacheResult It is used to reduce the size of the cache. 
*/ def eval(expr: Tree, thisV: Ref, klass: ClassSymbol, cacheResult: Boolean = false): Contextual[Value] = log("evaluating " + expr.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { - cache.get(thisV, expr) match - case Some(value) => value - case None => - cache.assume(thisV, expr, cacheResult) { cases(expr, thisV, klass) } + cache.cachedEval(thisV, expr, cacheResult, default = Hot) { expr => cases(expr, thisV, klass) } } /** Evaluate a list of expressions */ @@ -1319,13 +1189,17 @@ object Semantic: else eval(arg.tree, thisV, klass) - argInfos += ArgInfo(res, trace.add(arg.tree)) + argInfos += new ArgInfo(res, trace.add(arg.tree)) } argInfos.toList /** Handles the evaluation of different expressions * * Note: Recursive call should go to `eval` instead of `cases`. + * + * @param expr The expression to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the expression `expr` is located. */ def cases(expr: Tree, thisV: Ref, klass: ClassSymbol): Contextual[Value] = val trace2 = trace.add(expr) @@ -1415,12 +1289,12 @@ object Semantic: eval(qual, thisV, klass) val res = eval(rhs, thisV, klass) extendTrace(expr) { - res.ensureHot("The RHS of reassignment must be hot. Found = " + res.show + ". ") + res.ensureHot("The RHS of reassignment must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case id: Ident => val res = eval(rhs, thisV, klass) extendTrace(expr) { - res.ensureHot("The RHS of reassignment must be hot. Found = " + res.show + ". ") + res.ensureHot("The RHS of reassignment must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case closureDef(ddef) => @@ -1443,14 +1317,14 @@ object Semantic: case Match(selector, cases) => val res = eval(selector, thisV, klass) extendTrace(selector) { - res.ensureHot("The value to be matched needs to be hot. Found = " + res.show + ". 
") + res.ensureHot("The value to be matched needs to be transitively initialized (Hot). It was found to be " + res.show + ". ") } eval(cases.map(_.body), thisV, klass).join case Return(expr, from) => val res = eval(expr, thisV, klass) extendTrace(expr) { - res.ensureHot("return expression must be hot. Found = " + res.show + ". ") + res.ensureHot("return expression must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case WhileDo(cond, body) => @@ -1503,7 +1377,14 @@ object Semantic: report.error("[Internal error] unexpected tree" + Trace.show, expr) Hot - /** Handle semantics of leaf nodes */ + /** Handle semantics of leaf nodes + * + * For leaf nodes, their semantics is determined by their types. + * + * @param tp The type to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the type `tp` is located. + */ def cases(tp: Type, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("evaluating " + tp.show, printer, (_: Value).show) { tp match case _: ConstantType => @@ -1541,7 +1422,12 @@ object Semantic: Hot } - /** Resolve C.this that appear in `klass` */ + /** Resolve C.this that appear in `klass` + * + * @param target The class symbol for `C` for which `C.this` is to be resolved. + * @param thisV The value for `D.this` where `D` is represented by the parameter `klass`. + * @param klass The enclosing class where the type `C.this` is located. 
+ */ def resolveThis(target: ClassSymbol, thisV: Value, klass: ClassSymbol): Contextual[Value] = log("resolving " + target.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { if target == klass then thisV else if target.is(Flags.Package) then Hot @@ -1566,7 +1452,12 @@ object Semantic: } - /** Compute the outer value that correspond to `tref.prefix` */ + /** Compute the outer value that correspond to `tref.prefix` + * + * @param tref The type whose prefix is to be evaluated. + * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. + * @param klass The enclosing class where the type `tref` is located. + */ def outerValue(tref: TypeRef, thisV: Ref, klass: ClassSymbol): Contextual[Value] = val cls = tref.classSymbol.asClass if tref.prefix == NoPrefix then @@ -1577,7 +1468,12 @@ object Semantic: if cls.isAllOf(Flags.JavaInterface) then Hot else cases(tref.prefix, thisV, klass) - /** Initialize part of an abstract object in `klass` of the inheritance chain */ + /** Initialize part of an abstract object in `klass` of the inheritance chain + * + * @param tpl The class body to be evaluated. + * @param thisV The value of the current object to be initialized. + * @param klass The class to which the template belongs. + */ def init(tpl: Template, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("init " + klass.show, printer, (_: Value).show) { val paramsMap = tpl.constr.termParamss.flatten.map { vdef => vdef.name -> thisV.objekt.field(vdef.symbol) @@ -1657,9 +1553,14 @@ object Semantic: // term arguments to B. That can only be done in a concrete class. val tref = typeRefOf(klass.typeRef.baseType(mixin).typeConstructor) val ctor = tref.classSymbol.primaryConstructor - if ctor.exists then extendTrace(superParent) { - superCall(tref, ctor, Nil, tasks) - } + if ctor.exists then + // The parameter check of traits comes late in the mixin phase. 
+ // To avoid crash we supply hot values for erroneous parent calls. + // See tests/neg/i16438.scala. + val args: List[ArgInfo] = ctor.info.paramInfoss.flatten.map(_ => new ArgInfo(Hot, Trace.empty)) + extendTrace(superParent) { + superCall(tref, ctor, args, tasks) + } } // initialize super classes after outers are set @@ -1715,70 +1616,3 @@ object Semantic: traverseChildren(tp) traverser.traverse(tpt.tpe) - -// ----- Utility methods and extractors -------------------------------- - - def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match - case tref: TypeRef => tref - case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) - - - opaque type Arg = Tree | ByNameArg - case class ByNameArg(tree: Tree) - - extension (arg: Arg) - def isByName = arg.isInstanceOf[ByNameArg] - def tree: Tree = arg match - case t: Tree => t - case ByNameArg(t) => t - - object Call: - - def unapply(tree: Tree)(using Context): Option[(Tree, List[List[Arg]])] = - tree match - case Apply(fn, args) => - val argTps = fn.tpe.widen match - case mt: MethodType => mt.paramInfos - val normArgs: List[Arg] = args.zip(argTps).map { - case (arg, _: ExprType) => ByNameArg(arg) - case (arg, _) => arg - } - unapply(fn) match - case Some((ref, args0)) => Some((ref, args0 :+ normArgs)) - case None => None - - case TypeApply(fn, targs) => - unapply(fn) - - case ref: RefTree if ref.tpe.widenSingleton.isInstanceOf[MethodicType] => - Some((ref, Nil)) - - case _ => None - - object NewExpr: - def unapply(tree: Tree)(using Context): Option[(TypeRef, New, Symbol, List[List[Arg]])] = - tree match - case Call(fn @ Select(newTree: New, init), argss) if init == nme.CONSTRUCTOR => - val tref = typeRefOf(newTree.tpe) - Some((tref, newTree, fn.symbol, argss)) - case _ => None - - object PolyFun: - def unapply(tree: Tree)(using Context): Option[Tree] = - tree match - case Block((cdef: TypeDef) :: Nil, Typed(NewExpr(tref, _, _, _), _)) - if tref.symbol.isAnonymousClass && tref <:< 
defn.PolyFunctionType - => - val body = cdef.rhs.asInstanceOf[Template].body - val apply = body.head.asInstanceOf[DefDef] - Some(apply.rhs) - case _ => - None - - extension (symbol: Symbol) def hasSource(using Context): Boolean = - !symbol.defTree.isEmpty - - def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show) { - if (sym.isEffectivelyFinal || sym.isConstructor) sym - else sym.matchingMember(cls.appliedRef) - } diff --git a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala new file mode 100644 index 000000000000..7dfbc0b6cfa5 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* +import ast.tpd.* +import util.SourcePosition + +import Decorators._, printing.SyntaxHighlighting + +import scala.collection.mutable + +/** Logic related to evaluation trace for showing friendly error messages + * + * A trace is a sequence of program positions which tells the evaluation order + * that leads to an error. It is usually more informative than the stack trace + * by tracking the exact sub-expression in the trace instead of only methods. 
+ */ +object Trace: + opaque type Trace = Vector[Tree] + + val empty: Trace = Vector.empty + + extension (trace: Trace) + def add(node: Tree): Trace = trace :+ node + def toVector: Vector[Tree] = trace + def ++(trace2: Trace): Trace = trace ++ trace2 + + def show(using trace: Trace, ctx: Context): String = buildStacktrace(trace, "\n") + + def position(using trace: Trace): Tree = trace.last + + def trace(using t: Trace): Trace = t + + inline def withTrace[T](t: Trace)(op: Trace ?=> T): T = op(using t) + + inline def extendTrace[T](node: Tree)(using t: Trace)(op: Trace ?=> T): T = op(using t.add(node)) + + def buildStacktrace(trace: Trace, preamble: String)(using Context): String = if trace.isEmpty then "" else preamble + { + var lastLineNum = -1 + var lines: mutable.ArrayBuffer[String] = new mutable.ArrayBuffer + trace.foreach { tree => + val pos = tree.sourcePos + val prefix = "-> " + val line = + if pos.source.exists then + val loc = "[ " + pos.source.file.name + ":" + (pos.line + 1) + " ]" + val code = SyntaxHighlighting.highlight(pos.lineContent.trim.nn) + i"$code\t$loc" + else + tree.show + val positionMarkerLine = + if pos.exists && pos.source.exists then + positionMarker(pos) + else "" + + // always use the more precise trace location + if lastLineNum == pos.line then + lines.dropRightInPlace(1) + + lines += (prefix + line + "\n" + positionMarkerLine) + + lastLineNum = pos.line + } + val sb = new StringBuilder + for line <- lines do sb.append(line) + sb.toString + } + + /** Used to underline source positions in the stack trace + * pos.source must exist + */ + private def positionMarker(pos: SourcePosition): String = + val trimmed = pos.lineContent.takeWhile(c => c.isWhitespace).length + val padding = pos.startColumnPadding.substring(trimmed).nn + " " + val carets = + if (pos.startLine == pos.endLine) + "^" * math.max(1, pos.endColumn - pos.startColumn) + else "^" + + s"$padding$carets\n" diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala 
b/compiler/src/dotty/tools/dotc/transform/init/Util.scala new file mode 100644 index 000000000000..4e60c1325b09 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -0,0 +1,102 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* +import Types.* +import Symbols.* +import StdNames.* +import ast.tpd.* + +import reporting.trace as log +import config.Printers.init as printer + +import Trace.* + +object Util: + /** Utility definition used for better error-reporting of argument errors */ + case class TraceValue[T](value: T, trace: Trace) + + def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match + case tref: TypeRef => tref + case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) + + + opaque type Arg = Tree | ByNameArg + case class ByNameArg(tree: Tree) + + extension (arg: Arg) + def isByName = arg.isInstanceOf[ByNameArg] + def tree: Tree = arg match + case t: Tree => t + case ByNameArg(t) => t + + object Call: + + def unapply(tree: Tree)(using Context): Option[(Tree, List[List[Arg]])] = + tree match + case Apply(fn, args) => + val argTps = fn.tpe.widen match + case mt: MethodType => mt.paramInfos + val normArgs: List[Arg] = args.zip(argTps).map { + case (arg, _: ExprType) => ByNameArg(arg) + case (arg, _) => arg + } + unapply(fn) match + case Some((ref, args0)) => Some((ref, args0 :+ normArgs)) + case None => None + + case TypeApply(fn, targs) => + unapply(fn) + + case ref: RefTree if ref.tpe.widenSingleton.isInstanceOf[MethodicType] => + Some((ref, Nil)) + + case _ => None + + object NewExpr: + def unapply(tree: Tree)(using Context): Option[(TypeRef, New, Symbol, List[List[Arg]])] = + tree match + case Call(fn @ Select(newTree: New, init), argss) if init == nme.CONSTRUCTOR => + val tref = typeRefOf(newTree.tpe) + Some((tref, newTree, fn.symbol, argss)) + case _ => None + + object PolyFun: + def unapply(tree: Tree)(using Context): Option[Tree] = + tree match + case 
Block((cdef: TypeDef) :: Nil, Typed(NewExpr(tref, _, _, _), _)) + if tref.symbol.isAnonymousClass && tref <:< defn.PolyFunctionType + => + val body = cdef.rhs.asInstanceOf[Template].body + val apply = body.head.asInstanceOf[DefDef] + Some(apply.rhs) + case _ => + None + + def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show) { + if (sym.isEffectivelyFinal || sym.isConstructor) sym + else sym.matchingMember(cls.appliedRef) + } + + + extension (sym: Symbol) + def hasSource(using Context): Boolean = !sym.defTree.isEmpty + + def isStaticObject(using Context) = + sym.is(Flags.Module, butNot = Flags.Package) && sym.isStatic + + def isConcreteClass(cls: ClassSymbol)(using Context) = + val instantiable: Boolean = + cls.is(Flags.Module) || + !cls.isOneOf(Flags.AbstractOrTrait) && { + // see `Checking.checkInstantiable` in typer + val tp = cls.appliedRef + val stp = SkolemType(tp) + val selfType = cls.givenSelfType.asSeenFrom(stp, cls) + !selfType.exists || stp <:< selfType + } + + // A concrete class may not be instantiated if the self type is not satisfied + instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index a8432833d42a..3e05310d7249 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -9,7 +9,7 @@ import TypeUtils._ import Contexts._ import Flags._ import ast._ -import Decorators._ +import Decorators.{ show => _, * } import Symbols._ import StdNames._ import NameOps._ @@ -22,9 +22,13 @@ import transform.SymUtils._ import reporting._ import config.Printers.{exhaustivity => debug} import util.{SrcPos, NoSourcePosition} -import collection.mutable -/** Space logic for checking exhaustivity and unreachability of pattern matching +import 
scala.annotation.internal.sharable +import scala.collection.mutable + +import SpaceEngine.* + +/* Space logic for checking exhaustivity and unreachability of pattern matching * * Space can be thought of as a set of possible values. A type or a pattern * both refer to spaces. The space of a type is the values that inhabit the @@ -53,9 +57,32 @@ import collection.mutable * */ - /** space definition */ -sealed trait Space +sealed trait Space: + + @sharable private val isSubspaceCache = mutable.HashMap.empty[Space, Boolean] + + def isSubspace(b: Space)(using Context): Boolean = + val a = this + val a2 = a.simplify + val b2 = b.simplify + if (a ne a2) || (b ne b2) then a2.isSubspace(b2) + else if a == Empty then true + else if b == Empty then false + else trace(s"isSubspace(${show(this)}, ${show(b)})", debug) { + isSubspaceCache.getOrElseUpdate(b, computeIsSubspace(a, b)) + } + + @sharable private var mySimplified: Space | Null = null + + def simplify(using Context): Space = + val simplified = mySimplified + if simplified == null then + val simplified = SpaceEngine.computeSimplify(this) + mySimplified = simplified + simplified + else simplified +end Space /** Empty space */ case object Empty extends Space @@ -66,7 +93,21 @@ case object Empty extends Space * @param decomposed: does the space result from decomposition? 
Used for pretty print * */ -case class Typ(tp: Type, decomposed: Boolean = true) extends Space +case class Typ(tp: Type, decomposed: Boolean = true) extends Space: + private var myDecompose: List[Typ] | Null = null + + def canDecompose(using Context): Boolean = decompose != ListOfTypNoType + + def decompose(using Context): List[Typ] = + val decompose = myDecompose + if decompose == null then + val decompose = tp match + case Parts(parts) => parts.map(Typ(_, decomposed = true)) + case _ => ListOfTypNoType + myDecompose = decompose + decompose + else decompose +end Typ /** Space representing an extractor pattern */ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space @@ -74,59 +115,28 @@ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space /** Union of spaces */ case class Or(spaces: Seq[Space]) extends Space -/** abstract space logic */ -trait SpaceLogic { - /** Is `tp1` a subtype of `tp2`? */ - def isSubType(tp1: Type, tp2: Type): Boolean - - /** True if we can assume that the two unapply methods are the same. - * That is, given the same parameter, they return the same result. - * - * We assume that unapply methods are pure, but the same method may - * be called with different prefixes, thus behaving differently. - */ - def isSameUnapply(tp1: TermRef, tp2: TermRef): Boolean - - /** Return a space containing the values of both types. - * - * The types should be atomic (non-decomposable) and unrelated (neither - * should be a subtype of the other). - */ - def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type): Space - - /** Is the type `tp` decomposable? i.e. all values of the type can be covered - * by its decomposed types. - * - * Abstract sealed class, OrType, Boolean and Java enums can be decomposed. 
- */ - def canDecompose(tp: Type): Boolean - - /** Return term parameter types of the extractor `unapp` */ - def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int): List[Type] - - /** Get components of decomposable types */ - def decompose(tp: Type): List[Typ] - - /** Whether the extractor covers the given type */ - def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int): Boolean +object SpaceEngine { + import tpd._ - /** Display space in string format */ - def show(sp: Space): String + def simplify(space: Space)(using Context): Space = space.simplify + def isSubspace(a: Space, b: Space)(using Context): Boolean = a.isSubspace(b) + def canDecompose(typ: Typ)(using Context): Boolean = typ.canDecompose + def decompose(typ: Typ)(using Context): List[Typ] = typ.decompose /** Simplify space such that a space equal to `Empty` becomes `Empty` */ - def simplify(space: Space)(using Context): Space = trace(s"simplify ${show(space)} --> ", debug, show)(space match { + def computeSimplify(space: Space)(using Context): Space = trace(s"simplify ${show(space)} --> ", debug, show)(space match { case Prod(tp, fun, spaces) => - val sps = spaces.map(simplify(_)) - if (sps.contains(Empty)) Empty - else if (canDecompose(tp) && decompose(tp).isEmpty) Empty - else Prod(tp, fun, sps) + val sps = spaces.mapconserve(simplify) + if sps.contains(Empty) then Empty + else if decompose(tp).isEmpty then Empty + else if sps eq spaces then space else Prod(tp, fun, sps) case Or(spaces) => - val spaces2 = spaces.map(simplify(_)).filter(_ != Empty) + val spaces2 = spaces.map(simplify).filter(_ != Empty) if spaces2.isEmpty then Empty - else if spaces2.lengthCompare(1) == 0 then spaces2.head - else Or(spaces2) - case Typ(tp, _) => - if (canDecompose(tp) && decompose(tp).isEmpty) Empty + else if spaces2.lengthIs == 1 then spaces2.head + else if spaces2.corresponds(spaces)(_ eq _) then space else Or(spaces2) + case typ: Typ => + if decompose(typ).isEmpty then Empty else space case _ => space }) 
@@ -164,119 +174,98 @@ trait SpaceLogic { List(space) } - /** Is `a` a subspace of `b`? Equivalent to `a - b == Empty`, but faster */ - def isSubspace(a: Space, b: Space)(using Context): Boolean = trace(s"isSubspace(${show(a)}, ${show(b)})", debug) { - def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp))) - - (simplify(a), simplify(b)) match { + /** Is `a` a subspace of `b`? Equivalent to `simplify(simplify(a) - simplify(b)) == Empty`, but faster */ + def computeIsSubspace(a: Space, b: Space)(using Context): Boolean = { + val a2 = simplify(a) + val b2 = simplify(b) + if (a ne a2) || (b ne b2) then isSubspace(a2, b2) + else (a, b) match { case (Empty, _) => true case (_, Empty) => false - case (Or(ss), _) => - ss.forall(isSubspace(_, b)) - case (Typ(tp1, _), Typ(tp2, _)) => + case (Or(ss), _) => ss.forall(isSubspace(_, b)) + case (a @ Typ(tp1, _), Or(ss)) => // optimization: don't go to subtraction too early + ss.exists(isSubspace(a, _)) + || canDecompose(a) && isSubspace(Or(decompose(a)), b) + case (_, Or(_)) => simplify(minus(a, b)) == Empty + case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => isSubType(tp1, tp2) - || canDecompose(tp1) && tryDecompose1(tp1) - || canDecompose(tp2) && tryDecompose2(tp2) - case (Typ(tp1, _), Or(ss)) => // optimization: don't go to subtraction too early - ss.exists(isSubspace(a, _)) || tryDecompose1(tp1) - case (_, Or(_)) => - simplify(minus(a, b)) == Empty + || canDecompose(a) && isSubspace(Or(decompose(a)), b) + || canDecompose(b) && isSubspace(a, Or(decompose(b))) case (Prod(tp1, _, _), Typ(tp2, _)) => isSubType(tp1, tp2) case (Typ(tp1, _), Prod(tp2, fun, ss)) => isSubType(tp1, tp2) && covers(fun, tp1, ss.length) - && isSubspace(Prod(tp2, fun, signature(fun, tp2, ss.length).map(Typ(_, false))), b) + && isSubspace(Prod(tp2, fun, signature(fun, tp1, ss.length).map(Typ(_, false))), b) case (Prod(_, fun1, ss1), Prod(_, fun2, ss2)) 
=> - isSameUnapply(fun1, fun2) && ss1.zip(ss2).forall((isSubspace _).tupled) + isSameUnapply(fun1, fun2) && ss1.lazyZip(ss2).forall(isSubspace) } } /** Intersection of two spaces */ def intersect(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} & ${show(b)}", debug, show) { - def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp))) - (a, b) match { case (Empty, _) | (_, Empty) => Empty case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filter(_ ne Empty)) case (Or(ss), _) => Or(ss.map(intersect(_, b)).filter(_ ne Empty)) - case (Typ(tp1, _), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) a - else if (isSubType(tp2, tp1)) b - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (canDecompose(tp2)) tryDecompose2(tp2) - else intersectUnrelatedAtomicTypes(tp1, tp2) - case (Typ(tp1, _), Prod(tp2, fun, ss)) => - if (isSubType(tp2, tp1)) b - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class - else intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun, ss) - case sp => sp - case (Prod(tp1, fun, ss), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) a - else if (canDecompose(tp2)) tryDecompose2(tp2) - else if (isSubType(tp2, tp1)) a // problematic corner case: inheriting a case class - else intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun, ss) - case sp => sp - case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => - if (!isSameUnapply(fun1, fun2)) intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun1, ss1) - case sp => sp - else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty - else Prod(tp1, fun1, ss1.zip(ss2).map((intersect _).tupled)) + case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then a + else if isSubType(tp2, tp1) then b + else if canDecompose(a) then intersect(Or(decompose(a)), b) 
+ else if canDecompose(b) then intersect(a, Or(decompose(b))) + else intersectUnrelatedAtomicTypes(tp1, tp2)(a) + case (a @ Typ(tp1, _), Prod(tp2, fun, ss)) => + if isSubType(tp2, tp1) then b + else if canDecompose(a) then intersect(Or(decompose(a)), b) + else if isSubType(tp1, tp2) then a // problematic corner case: inheriting a case class + else intersectUnrelatedAtomicTypes(tp1, tp2)(b) + case (Prod(tp1, fun, ss), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then a + else if canDecompose(b) then intersect(a, Or(decompose(b))) + else if isSubType(tp2, tp1) then a // problematic corner case: inheriting a case class + else intersectUnrelatedAtomicTypes(tp1, tp2)(a) + case (a @ Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => + if !isSameUnapply(fun1, fun2) then intersectUnrelatedAtomicTypes(tp1, tp2)(a) + else if ss1.lazyZip(ss2).exists((a, b) => simplify(intersect(a, b)) == Empty) then Empty + else Prod(tp1, fun1, ss1.lazyZip(ss2).map(intersect)) } } /** The space of a not covered by b */ def minus(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} - ${show(b)}", debug, show) { - def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp))) - (a, b) match { case (Empty, _) => Empty case (_, Empty) => a - case (Typ(tp1, _), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) Empty - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (canDecompose(tp2)) tryDecompose2(tp2) + case (Or(ss), _) => Or(ss.map(minus(_, b))) + case (_, Or(ss)) => ss.foldLeft(a)(minus) + case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then Empty + else if canDecompose(a) then minus(Or(decompose(a)), b) + else if canDecompose(b) then minus(a, Or(decompose(b))) else a - case (Typ(tp1, _), Prod(tp2, fun, ss)) => + case (a @ Typ(tp1, _), Prod(tp2, fun, ss)) => // rationale: every instance of `tp1` is covered by `tp2(_)` if isSubType(tp1, tp2) && covers(fun, tp1, ss.length) then minus(Prod(tp1, fun, signature(fun, 
tp1, ss.length).map(Typ(_, false))), b) - else if canDecompose(tp1) then - tryDecompose1(tp1) - else - a - case (Or(ss), _) => - Or(ss.map(minus(_, b))) - case (_, Or(ss)) => - ss.foldLeft(a)(minus) - case (Prod(tp1, fun, ss), Typ(tp2, _)) => + else if canDecompose(a) then minus(Or(decompose(a)), b) + else a + case (Prod(tp1, fun, ss), b @ Typ(tp2, _)) => // uncovered corner case: tp2 :< tp1, may happen when inheriting case class - if (isSubType(tp1, tp2)) - Empty - else if (simplify(a) == Empty) - Empty - else if (canDecompose(tp2)) - tryDecompose2(tp2) - else - a - case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => - if (!isSameUnapply(fun1, fun2)) return a - if (fun1.symbol.name == nme.unapply && ss1.length != ss2.length) return a - - val range = (0 until ss1.size).toList + if isSubType(tp1, tp2) then Empty + else if simplify(a) == Empty then Empty + else if canDecompose(b) then minus(a, Or(decompose(b))) + else a + case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) + if !isSameUnapply(fun1, fun2) => a + case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) + if fun1.symbol.name == nme.unapply && ss1.length != ss2.length => a + case (a @ Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => + val range = ss1.indices.toList val cache = Array.fill[Space | Null](ss2.length)(null) def sub(i: Int) = if cache(i) == null then cache(i) = minus(ss1(i), ss2(i)) cache(i).nn - end sub if range.exists(i => isSubspace(ss1(i), sub(i))) then a else if cache.forall(sub => isSubspace(sub.nn, Empty)) then Empty @@ -288,9 +277,6 @@ trait SpaceLogic { Or(spaces) } } -} - -object SpaceEngine { /** Is the unapply or unapplySeq irrefutable? * @param unapp The unapply function reference @@ -306,6 +292,7 @@ object SpaceEngine { val isEmptyTp = extractorMemberType(unappResult, nme.isEmpty, NoSourcePosition) isEmptyTp <:< ConstantType(Constant(false)) } + || unappResult.derivesFrom(defn.NonEmptyTupleClass) } /** Is the unapply or unapplySeq irrefutable? 
@@ -316,26 +303,42 @@ object SpaceEngine { case funRef: TermRef => isIrrefutable(funRef, argLen) case _: ErrorType => false } -} -/** Scala implementation of space logic */ -class SpaceEngine(using Context) extends SpaceLogic { - import tpd._ - - private val scalaSeqFactoryClass = defn.SeqFactoryClass - private val scalaListType = defn.ListClass.typeRef - private val scalaNilType = defn.NilModule.termRef - private val scalaConsType = defn.ConsClass.typeRef - - private val constantNullType = ConstantType(Constant(null)) + /** Is this an `'{..}` or `'[..]` irrefutable quoted patterns? + * @param unapp The unapply function tree + * @param implicits The implicits of the unapply + * @param pt The scrutinee type + */ + def isIrrefutableQuotedPattern(unapp: tpd.Tree, implicits: List[tpd.Tree], pt: Type)(using Context): Boolean = { + implicits.headOption match + // pattern '{ $x: T } + case Some(tpd.Apply(tpd.Select(tpd.Quoted(tpd.TypeApply(fn, List(tpt))), nme.apply), _)) + if unapp.symbol.owner.eq(defn.QuoteMatching_ExprMatchModule) + && fn.symbol.eq(defn.QuotedRuntimePatterns_patternHole) => + pt <:< defn.QuotedExprClass.typeRef.appliedTo(tpt.tpe) + + // pattern '[T] + case Some(tpd.Apply(tpd.TypeApply(fn, List(tpt)), _)) + if unapp.symbol.owner.eq(defn.QuoteMatching_TypeMatchModule) => + pt =:= defn.QuotedTypeClass.typeRef.appliedTo(tpt.tpe) + + case _ => false + } - override def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type): Space = trace(s"atomic intersection: ${AndType(tp1, tp2).show}", debug) { + /** Return a space containing the values of both types. + * + * The types should be atomic (non-decomposable) and unrelated (neither + * should be a subtype of the other). + */ + def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type)(sp: Space)(using Context): Space = trace(i"atomic intersection: ${AndType(tp1, tp2)}", debug) { // Precondition: !isSubType(tp1, tp2) && !isSubType(tp2, tp1). 
if !ctx.mode.is(Mode.SafeNulls) && (tp1.isNullType || tp2.isNullType) then // Since projections of types don't include null, intersection with null is empty. Empty else - val intersection = Typ(AndType(tp1, tp2), decomposed = false) + val intersection = sp match + case sp: Prod => sp.copy(AndType(tp1, tp2)) + case _ => Typ(AndType(tp1, tp2), decomposed = false) // unrelated numeric value classes can equal each other, so let's not consider type space intersection empty if tp1.classSymbol.isNumericValueClass && tp2.classSymbol.isNumericValueClass then intersection else if isPrimToBox(tp1, tp2) || isPrimToBox(tp2, tp1) then intersection @@ -344,7 +347,7 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Return the space that represents the pattern `pat` */ - def project(pat: Tree): Space = pat match { + def project(pat: Tree)(using Context): Space = trace(i"project($pat ${pat.className} ${pat.tpe})", debug, show)(pat match { case Literal(c) => if (c.value.isInstanceOf[Symbol]) Typ(c.value.asInstanceOf[Symbol].termRef, decomposed = false) @@ -371,7 +374,7 @@ class SpaceEngine(using Context) extends SpaceLogic { val funRef = fun1.tpe.asInstanceOf[TermRef] if (fun.symbol.name == nme.unapplySeq) val (arity, elemTp, resultTp) = unapplySeqInfo(fun.tpe.widen.finalResultType, fun.srcPos) - if (fun.symbol.owner == scalaSeqFactoryClass && scalaListType.appliedTo(elemTp) <:< pat.tpe) + if (fun.symbol.owner == defn.SeqFactoryClass && defn.ListType.appliedTo(elemTp) <:< pat.tpe) // The exhaustivity and reachability logic already handles decomposing sum types (into its subclasses) // and product types (into its components). To get better counter-examples for patterns that are of type // List (or a super-type of list, like LinearSeq) we project them into spaces that use `::` and Nil. @@ -405,14 +408,14 @@ class SpaceEngine(using Context) extends SpaceLogic { case _ => // Pattern is an arbitrary expression; assume a skolem (i.e. 
an unknown value) of the pattern type Typ(pat.tpe.narrow, decomposed = false) - } + }) - private def project(tp: Type): Space = tp match { + private def project(tp: Type)(using Context): Space = tp match { case OrType(tp1, tp2) => Or(project(tp1) :: project(tp2) :: Nil) case tp => Typ(tp, decomposed = true) } - private def unapplySeqInfo(resTp: Type, pos: SrcPos): (Int, Type, Type) = { + private def unapplySeqInfo(resTp: Type, pos: SrcPos)(using Context): (Int, Type, Type) = { var resultTp = resTp var elemTp = unapplySeqTypeElemTp(resultTp) var arity = productArity(resultTp, pos) @@ -459,15 +462,23 @@ class SpaceEngine(using Context) extends SpaceLogic { * If `isValue` is true, then pattern-bound symbols are erased to its upper bound. * This is needed to avoid spurious unreachable warnings. See tests/patmat/i6197.scala. */ - private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false): Type = trace(i"$tp erased to", debug) { + private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false)(using Context): Type = + trace(i"erase($tp${if inArray then " inArray" else ""}${if isValue then " isValue" else ""})", debug)(tp match { + case tp @ AppliedType(tycon, args) if tycon.typeSymbol.isPatternBound => + WildcardType - tp match { case tp @ AppliedType(tycon, args) => - if tycon.typeSymbol.isPatternBound then return WildcardType - val args2 = - if (tycon.isRef(defn.ArrayClass)) args.map(arg => erase(arg, inArray = true, isValue = false)) - else args.map(arg => erase(arg, inArray = false, isValue = false)) + if tycon.isRef(defn.ArrayClass) then + args.map(arg => erase(arg, inArray = true, isValue = false)) + else tycon.typeParams.lazyZip(args).map { (tparam, arg) => + if isValue && tparam.paramVarianceSign == 0 then + // when matching against a value, + // any type argument for an invariant type parameter will be unchecked, + // meaning it won't fail to match against anything; thus the wildcard replacement + WildcardType + else 
erase(arg, inArray = false, isValue = false) + } tp.derivedAppliedType(erase(tycon, inArray, isValue = false), args2) case tp @ OrType(tp1, tp2) => @@ -485,48 +496,49 @@ class SpaceEngine(using Context) extends SpaceLogic { else WildcardType case _ => tp - } - } + }) /** Space of the pattern: unapplySeq(a, b, c: _*) */ - def projectSeq(pats: List[Tree]): Space = { - if (pats.isEmpty) return Typ(scalaNilType, false) + def projectSeq(pats: List[Tree])(using Context): Space = { + if (pats.isEmpty) return Typ(defn.NilType, false) val (items, zero) = if (isWildcardStarArg(pats.last)) - (pats.init, Typ(scalaListType.appliedTo(pats.last.tpe.elemType), false)) + (pats.init, Typ(defn.ListType.appliedTo(pats.last.tpe.elemType), false)) else - (pats, Typ(scalaNilType, false)) + (pats, Typ(defn.NilType, false)) - val unapplyTp = scalaConsType.classSymbol.companionModule.termRef.select(nme.unapply) + val unapplyTp = defn.ConsType.classSymbol.companionModule.termRef.select(nme.unapply) items.foldRight[Space](zero) { (pat, acc) => - val consTp = scalaConsType.appliedTo(pats.head.tpe.widen) + val consTp = defn.ConsType.appliedTo(pats.head.tpe.widen) Prod(consTp, unapplyTp, project(pat) :: acc :: Nil) } } - def isPrimToBox(tp: Type, pt: Type): Boolean = + def isPrimToBox(tp: Type, pt: Type)(using Context): Boolean = tp.isPrimitiveValueType && (defn.boxedType(tp).classSymbol eq pt.classSymbol) - private val isSubspaceCache = mutable.HashMap.empty[(Space, Space, Context), Boolean] - - override def isSubspace(a: Space, b: Space)(using Context): Boolean = - isSubspaceCache.getOrElseUpdate((a, b, ctx), super.isSubspace(a, b)) - /** Is `tp1` a subtype of `tp2`? 
*/ - def isSubType(tp1: Type, tp2: Type): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) { - if tp1 == constantNullType && !ctx.mode.is(Mode.SafeNulls) - then tp2 == constantNullType + def isSubType(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) { + if tp1 == ConstantType(Constant(null)) && !ctx.mode.is(Mode.SafeNulls) + then tp2 == ConstantType(Constant(null)) else tp1 <:< tp2 } - def isSameUnapply(tp1: TermRef, tp2: TermRef): Boolean = + /** True if we can assume that the two unapply methods are the same. + * That is, given the same parameter, they return the same result. + * + * We assume that unapply methods are pure, but the same method may + * be called with different prefixes, thus behaving differently. + */ + def isSameUnapply(tp1: TermRef, tp2: TermRef)(using Context): Boolean = // always assume two TypeTest[S, T].unapply are the same if they are equal in types (tp1.prefix.isStable && tp2.prefix.isStable || tp1.symbol == defn.TypeTest_unapply) && tp1 =:= tp2 - /** Parameter types of the case class type `tp`. Adapted from `unapplyPlan` in patternMatcher */ - def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int): List[Type] = { + /** Return term parameter types of the extractor `unapp`. + * Parameter types of the case class type `tp`. 
Adapted from `unapplyPlan` in patternMatcher */ + def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): List[Type] = { val unappSym = unapp.symbol // println("scrutineeTp = " + scrutineeTp.show) @@ -534,16 +546,15 @@ class SpaceEngine(using Context) extends SpaceLogic { val mt: MethodType = unapp.widen match { case mt: MethodType => mt case pt: PolyType => - inContext(ctx.fresh.setExploreTyperState()) { val tvars = pt.paramInfos.map(newTypeVar(_)) val mt = pt.instantiate(tvars).asInstanceOf[MethodType] scrutineeTp <:< mt.paramInfos(0) // force type inference to infer a narrower type: could be singleton // see tests/patmat/i4227.scala mt.paramInfos(0) <:< scrutineeTp + instantiateSelected(mt, tvars) isFullyDefined(mt, ForceDegree.all) mt - } } // Case unapply: @@ -566,10 +577,10 @@ class SpaceEngine(using Context) extends SpaceLogic { if (isUnapplySeq) { val (arity, elemTp, resultTp) = unapplySeqInfo(resTp, unappSym.srcPos) - if (elemTp.exists) scalaListType.appliedTo(elemTp) :: Nil + if (elemTp.exists) defn.ListType.appliedTo(elemTp) :: Nil else { val sels = productSeqSelectors(resultTp, arity, unappSym.srcPos) - sels.init :+ scalaListType.appliedTo(sels.last) + sels.init :+ defn.ListType.appliedTo(sels.last) } } else { @@ -590,45 +601,43 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Whether the extractor covers the given type */ - def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int): Boolean = + def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): Boolean = SpaceEngine.isIrrefutable(unapp, argLen) || unapp.symbol == defn.TypeTest_unapply && { val AppliedType(_, _ :: tp :: Nil) = unapp.prefix.widen.dealias: @unchecked scrutineeTp <:< tp } /** Decompose a type into subspaces -- assume the type can be decomposed */ - def decompose(tp: Type): List[Typ] = - tp.dealias match { + def decompose(tp: Type)(using Context): List[Type] = trace(i"decompose($tp)", debug) { + def rec(tp: Type, mixins: 
List[Type]): List[Type] = tp.dealias match case AndType(tp1, tp2) => - def decomposeComponent(tpA: Type, tpB: Type): List[Typ] = - decompose(tpA).flatMap { - case Typ(tp, _) => - if tp <:< tpB then - Typ(tp, decomposed = true) :: Nil - else if tpB <:< tp then - Typ(tpB, decomposed = true) :: Nil - else if TypeComparer.provablyDisjoint(tp, tpB) then - Nil - else - Typ(AndType(tp, tpB), decomposed = true) :: Nil - } - - if canDecompose(tp1) then - decomposeComponent(tp1, tp2) - else - decomposeComponent(tp2, tp1) - - case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true)) - case tp if tp.isRef(defn.BooleanClass) => - List( - Typ(ConstantType(Constant(true)), true), - Typ(ConstantType(Constant(false)), true) - ) - case tp if tp.isRef(defn.UnitClass) => - Typ(ConstantType(Constant(())), true) :: Nil - case tp if tp.classSymbol.isAllOf(JavaEnumTrait) => - tp.classSymbol.children.map(sym => Typ(sym.termRef, true)) - case tp => + var tpB = tp2 + var parts = rec(tp1, tp2 :: mixins) + if parts == ListOfNoType then + tpB = tp1 + parts = rec(tp2, tp1 :: mixins) + if parts == ListOfNoType then ListOfNoType + else parts.collect: + case tp if tp <:< tpB => tp + case tp if tpB <:< tp => tpB + case tp if !TypeComparer.provablyDisjoint(tp, tpB) => AndType(tp, tpB) + + case OrType(tp1, tp2) => List(tp1, tp2) + case tp if tp.isRef(defn.BooleanClass) => List(ConstantType(Constant(true)), ConstantType(Constant(false))) + case tp if tp.isRef(defn.UnitClass) => ConstantType(Constant(())) :: Nil + case tp @ NamedType(Parts(parts), _) => parts.map(tp.derivedSelect) + case _: SingletonType => ListOfNoType + case tp if tp.classSymbol.isAllOf(JavaEnumTrait) => tp.classSymbol.children.map(_.termRef) + // the class of a java enum value is the enum class, so this must follow SingletonType to not loop infinitely + + case tp @ AppliedType(Parts(parts), targs) if tp.classSymbol.children.isEmpty => + // It might not obvious that it's OK to apply the type arguments of a parent type to child 
types. + // But this is guarded by `tp.classSymbol.children.isEmpty`, + // meaning we'll decompose to the same class, just not the same type. + // For instance, from i15029, `decompose((X | Y).Field[T]) = [X.Field[T], Y.Field[T]]`. + parts.map(tp.derivedAppliedType(_, targs)) + + case tp if tp.classSymbol.isDecomposableToChildren => def getChildren(sym: Symbol): List[Symbol] = sym.children.flatMap { child => if child eq sym then List(sym) // i3145: sealed trait Baz, val x = new Baz {}, Baz.children returns Baz... @@ -638,49 +647,52 @@ class SpaceEngine(using Context) extends SpaceLogic { else List(child) } val children = getChildren(tp.classSymbol) - debug.println(s"candidates for ${tp.show} : [${children.map(_.show).mkString(", ")}]") + debug.println(i"candidates for $tp : $children") val parts = children.map { sym => val sym1 = if (sym.is(ModuleClass)) sym.sourceModule else sym - val refined = TypeOps.refineUsingParent(tp, sym1) + val refined = TypeOps.refineUsingParent(tp, sym1, mixins) + debug.println(i"$sym1 refined to $refined") - debug.println(sym1.show + " refined to " + refined.show) + def inhabited(tp: Type): Boolean = tp.dealias match + case AndType(tp1, tp2) => !TypeComparer.provablyDisjoint(tp1, tp2) + case OrType(tp1, tp2) => inhabited(tp1) || inhabited(tp2) + case tp: RefinedType => inhabited(tp.parent) + case tp: TypeRef => inhabited(tp.prefix) + case _ => true - def inhabited(tp: Type): Boolean = - tp.dealias match { - case AndType(tp1, tp2) => !TypeComparer.provablyDisjoint(tp1, tp2) - case OrType(tp1, tp2) => inhabited(tp1) || inhabited(tp2) - case tp: RefinedType => inhabited(tp.parent) - case tp: TypeRef => inhabited(tp.prefix) - case _ => true - } - - if (inhabited(refined)) refined + if inhabited(refined) then refined else NoType - } filter(_.exists) + }.filter(_.exists) + debug.println(i"$tp decomposes to $parts") + parts - debug.println(s"${tp.show} decomposes to [${parts.map(_.show).mkString(", ")}]") + case _ => ListOfNoType + end rec - 
parts.map(Typ(_, true)) - } + rec(tp, Nil) + } - /** Abstract sealed types, or-types, Boolean and Java enums can be decomposed */ - def canDecompose(tp: Type): Boolean = - val res = tp.dealias match - case _: SingletonType => false - case _: OrType => true - case and: AndType => canDecompose(and.tp1) || canDecompose(and.tp2) - case _ => - val cls = tp.classSymbol - cls.is(Sealed) - && cls.isOneOf(AbstractOrTrait) - && !cls.hasAnonymousChild - && cls.children.nonEmpty - || cls.isAllOf(JavaEnumTrait) - || tp.isRef(defn.BooleanClass) - || tp.isRef(defn.UnitClass) - //debug.println(s"decomposable: ${tp.show} = $res") - res + extension (cls: Symbol) + /** A type is decomposable to children if it's sealed, + * abstract (or a trait) - so its not a sealed concrete class that can be instantiated on its own, + * has no anonymous children, which we wouldn't be able to name as counter-examples, + * but does have children. + * + * A sealed trait with no subclasses is considered not decomposable and thus is treated as an opaque type. + * A sealed trait with subclasses that then get removed after `refineUsingParent`, decomposes to the empty list. + * So that's why we consider whether a type has children. */ + def isDecomposableToChildren(using Context): Boolean = + cls.is(Sealed) && cls.isOneOf(AbstractOrTrait) && !cls.hasAnonymousChild && cls.children.nonEmpty + + val ListOfNoType = List(NoType) + val ListOfTypNoType = ListOfNoType.map(Typ(_, decomposed = true)) + + object Parts: + def unapply(tp: Type)(using Context): PartsExtractor = PartsExtractor(decompose(tp)) + + final class PartsExtractor(val get: List[Type]) extends AnyVal: + def isEmpty: Boolean = get == ListOfNoType /** Show friendly type name with current scope in mind * @@ -690,7 +702,7 @@ class SpaceEngine(using Context) extends SpaceLogic { * C --> C if current owner is C !!! 
* */ - def showType(tp: Type, showTypeArgs: Boolean = false): String = { + def showType(tp: Type, showTypeArgs: Boolean = false)(using Context): String = { val enclosingCls = ctx.owner.enclosingClass def isOmittable(sym: Symbol) = @@ -731,7 +743,7 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Whether the counterexample is satisfiable. The space is flattened and non-empty. */ - def satisfiable(sp: Space): Boolean = { + def satisfiable(sp: Space)(using Context): Boolean = { def impossible: Nothing = throw new AssertionError("`satisfiable` only accepts flattened space.") def genConstraint(space: Space): List[(Type, Type)] = space match { @@ -762,10 +774,10 @@ class SpaceEngine(using Context) extends SpaceLogic { checkConstraint(genConstraint(sp))(using ctx.fresh.setNewTyperState()) } - def show(ss: Seq[Space]): String = ss.map(show).mkString(", ") + def showSpaces(ss: Seq[Space])(using Context): String = ss.map(show).mkString(", ") /** Display spaces */ - def show(s: Space): String = { + def show(s: Space)(using Context): String = { def params(tp: Type): List[Type] = tp.classSymbol.primaryConstructor.info.firstParamTypes /** does the companion object of the given symbol have custom unapply */ @@ -779,7 +791,7 @@ class SpaceEngine(using Context) extends SpaceLogic { case Empty => "empty" case Typ(c: ConstantType, _) => "" + c.value.value case Typ(tp: TermRef, _) => - if (flattenList && tp <:< scalaNilType) "" + if (flattenList && tp <:< defn.NilType) "" else tp.symbol.showName case Typ(tp, decomposed) => @@ -787,9 +799,9 @@ class SpaceEngine(using Context) extends SpaceLogic { if (ctx.definitions.isTupleNType(tp)) params(tp).map(_ => "_").mkString("(", ", ", ")") - else if (scalaListType.isRef(sym)) + else if (defn.ListType.isRef(sym)) if (flattenList) "_*" else "_: List" - else if (scalaConsType.isRef(sym)) + else if (defn.ConsType.isRef(sym)) if (flattenList) "_, _*" else "List(_, _*)" else if (tp.classSymbol.is(Sealed) && 
tp.classSymbol.hasAnonymousChild) "_: " + showType(tp) + " (anonymous)" @@ -801,7 +813,7 @@ class SpaceEngine(using Context) extends SpaceLogic { case Prod(tp, fun, params) => if (ctx.definitions.isTupleNType(tp)) "(" + params.map(doShow(_)).mkString(", ") + ")" - else if (tp.isRef(scalaConsType.symbol)) + else if (tp.isRef(defn.ConsType.symbol)) if (flattenList) params.map(doShow(_, flattenList)).filter(_.nonEmpty).mkString(", ") else params.map(doShow(_, flattenList = true)).filter(!_.isEmpty).mkString("List(", ", ", ")") else { @@ -817,7 +829,7 @@ class SpaceEngine(using Context) extends SpaceLogic { doShow(s, flattenList = false) } - private def exhaustivityCheckable(sel: Tree): Boolean = { + private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = { val seen = collection.mutable.Set.empty[Type] // Possible to check everything, but be compatible with scalac by default @@ -846,8 +858,8 @@ class SpaceEngine(using Context) extends SpaceLogic { res } - /** Whehter counter-examples should be further checked? True for GADTs. */ - private def shouldCheckExamples(tp: Type): Boolean = + /** Whether counter-examples should be further checked? True for GADTs. */ + private def shouldCheckExamples(tp: Type)(using Context): Boolean = new TypeAccumulator[Boolean] { override def apply(b: Boolean, tp: Type): Boolean = tp match { case tref: TypeRef if tref.symbol.is(TypeParam) && variance != 1 => true @@ -858,7 +870,7 @@ class SpaceEngine(using Context) extends SpaceLogic { /** Return the underlying type of non-module, non-constant, non-enum case singleton types. * Also widen ExprType to its result type, and rewrap any annotation wrappers. * For example, with `val opt = None`, widen `opt.type` to `None.type`. 
*/ - def toUnderlying(tp: Type): Type = trace(i"toUnderlying($tp)", show = true)(tp match { + def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp)", show = true)(tp match { case _: ConstantType => tp case tp: TermRef if tp.symbol.is(Module) => tp case tp: TermRef if tp.symbol.isAllOf(EnumCase) => tp @@ -868,16 +880,11 @@ class SpaceEngine(using Context) extends SpaceLogic { case _ => tp }) - def checkExhaustivity(_match: Match): Unit = { - val Match(sel, cases) = _match - debug.println(i"checking exhaustivity of ${_match}") - - if (!exhaustivityCheckable(sel)) return - - val selTyp = toUnderlying(sel.tpe).dealias + def checkExhaustivity(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then trace(i"checkExhaustivity($m)", debug) { + val selTyp = toUnderlying(m.selector.tpe).dealias debug.println(i"selTyp = $selTyp") - val patternSpace = Or(cases.foldLeft(List.empty[Space]) { (acc, x) => + val patternSpace = Or(m.cases.foldLeft(List.empty[Space]) { (acc, x) => val space = if (x.guard.isEmpty) project(x.pat) else Empty debug.println(s"${x.pat.show} ====> ${show(space)}") space :: acc @@ -894,10 +901,10 @@ class SpaceEngine(using Context) extends SpaceLogic { if uncovered.nonEmpty then val hasMore = uncovered.lengthCompare(6) > 0 val deduped = dedup(uncovered.take(6)) - report.warning(PatternMatchExhaustivity(show(deduped), hasMore), sel.srcPos) + report.warning(PatternMatchExhaustivity(showSpaces(deduped), hasMore), m.selector) } - private def redundancyCheckable(sel: Tree): Boolean = + private def redundancyCheckable(sel: Tree)(using Context): Boolean = // Ignore Expr[T] and Type[T] for unreachability as a special case. // Quote patterns produce repeated calls to the same unapply method, but with different implicit parameters. 
// Since we assume that repeated calls to the same unapply method overlap @@ -907,19 +914,15 @@ class SpaceEngine(using Context) extends SpaceLogic { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) - def checkRedundancy(_match: Match): Unit = { - val Match(sel, _) = _match - val cases = _match.cases.toIndexedSeq - debug.println(i"checking redundancy in $_match") - - if (!redundancyCheckable(sel)) return + def checkRedundancy(m: Match)(using Context): Unit = if redundancyCheckable(m.selector) then trace(i"checkRedundancy($m)", debug) { + val cases = m.cases.toIndexedSeq - val selTyp = toUnderlying(sel.tpe).dealias + val selTyp = toUnderlying(m.selector.tpe).dealias debug.println(i"selTyp = $selTyp") val isNullable = selTyp.classSymbol.isNullableClass val targetSpace = if isNullable - then project(OrType(selTyp, constantNullType, soft = false)) + then project(OrType(selTyp, ConstantType(Constant(null)), soft = false)) else project(selTyp) debug.println(s"targetSpace: ${show(targetSpace)}") @@ -948,6 +951,7 @@ class SpaceEngine(using Context) extends SpaceLogic { for (pat <- deferred.reverseIterator) report.warning(MatchCaseUnreachable(), pat.srcPos) if pat != EmptyTree // rethrow case of catch uses EmptyTree + && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase && isSubspace(covered, prev) then { val nullOnly = isNullable && i == len - 1 && isWildcardArg(pat) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala index 8851e641122f..6471e58d4ddc 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala @@ -65,7 +65,7 @@ class AddLocalJSFakeNews extends MiniPhase { thisPhase => constant.typeValue.typeSymbol.asClass case _ => // this shouldn't happen - report.error(i"unexpected 
$classValueArg for the first argument to `createLocalJSClass`", classValueArg) + report.error(em"unexpected $classValueArg for the first argument to `createLocalJSClass`", classValueArg) jsdefn.JSObjectClass } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala index 3c87621413b7..705b3cc404a8 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala @@ -651,7 +651,7 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => case typeRef: TypeRef => typeRef case _ => // This should not have passed the checks in PrepJSInterop - report.error(i"class type required but found $tpe0", tree) + report.error(em"class type required but found $tpe0", tree) jsdefn.JSObjectType } val cls = tpe.typeSymbol @@ -667,7 +667,7 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => val jsclassAccessor = jsclassAccessorFor(cls) ref(NamedType(prefix, jsclassAccessor.name, jsclassAccessor.denot)) } else { - report.error(i"stable reference to a JS class required but $tpe found", tree) + report.error(em"stable reference to a JS class required but $tpe found", tree) ref(defn.Predef_undefined) } } else if (isLocalJSClass(cls)) { diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala index 817a6c5afabc..b911d7dfab96 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala @@ -13,6 +13,7 @@ import Scopes._ import Symbols._ import StdNames._ import Types._ +import Decorators.em import dotty.tools.dotc.transform.MegaPhase._ @@ -238,7 +239,7 @@ class JUnitBootstrappers extends MiniPhase { case NamedArg(name, _) => name.show(using ctx) case other => 
other.show(using ctx) } - report.error(s"$shownName is an unsupported argument for the JUnit @Test annotation in this position", other.sourcePos) + report.error(em"$shownName is an unsupported argument for the JUnit @Test annotation in this position", other.sourcePos) None } } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index b0de197635e9..25ab46712e70 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -189,7 +189,7 @@ object PrepJSExports { if (hasExplicitName) { annot.argumentConstantString(0).getOrElse { report.error( - s"The argument to ${annot.symbol.name} must be a literal string", + em"The argument to ${annot.symbol.name} must be a literal string", annot.arguments(0)) "dummy" } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index 75323e30bfb9..8a430991e378 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -248,9 +248,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP if (tpeSym.isJSType) { def reportError(reasonAndExplanation: String): Unit = { report.error( - "Using an anonymous function as a SAM for the JavaScript type " + - i"${tpeSym.fullName} is not allowed because " + - reasonAndExplanation, + em"Using an anonymous function as a SAM for the JavaScript type ${ + tpeSym.fullName + } is not allowed because $reasonAndExplanation", tree) } if (!tpeSym.is(Trait) || tpeSym.asClass.superClass != jsdefn.JSFunctionClass) { @@ -318,9 +318,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP nameArgs match { case List(Literal(Constant(s: String))) => if (s != "apply") - report.error(i"js.Dynamic.literal does 
not have a method named $s", tree) + report.error(em"js.Dynamic.literal does not have a method named $s", tree) case _ => - report.error(i"js.Dynamic.literal.${tree.symbol.name} may not be called directly", tree) + report.error(em"js.Dynamic.literal.${tree.symbol.name} may not be called directly", tree) } // TODO Warn for known duplicate property names @@ -381,7 +381,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tpe.underlyingClassRef(refinementOK = false) match { case typeRef: TypeRef if typeRef.symbol.isOneOf(Trait | ModuleClass) => - report.error(i"non-trait class type required but $tpe found", tpeArg) + report.error(em"non-trait class type required but $tpe found", tpeArg) case _ => // an error was already reported above } @@ -440,7 +440,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP * which is never valid. */ report.error( - i"${sym.name} extends ${parentSym.fullName} which does not extend js.Any.", + em"${sym.name} extends ${parentSym.fullName} which does not extend js.Any.", classDef) } } @@ -502,8 +502,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP def emitOverrideError(msg: String): Unit = { report.error( - "error overriding %s;\n %s %s".format( - infoStringWithLocation(overridden), infoString(overriding), msg), + em"""error overriding ${infoStringWithLocation(overridden)}; + | ${infoString(overriding)} $msg""", errorPos) } @@ -559,7 +559,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP for (annot <- sym.annotations) { val annotSym = annot.symbol if (isJSNativeLoadingSpecAnnot(annotSym)) - report.error(i"Traits may not have an @${annotSym.name} annotation.", annot.tree) + report.error(em"Traits may not have an @${annotSym.name} annotation.", annot.tree) } } else { checkJSNativeLoadSpecOf(treePos, sym) @@ -571,7 +571,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP def 
checkGlobalRefName(globalRef: String): Unit = { if (!JSGlobalRef.isValidJSGlobalRefName(globalRef)) - report.error(s"The name of a JS global variable must be a valid JS identifier (got '$globalRef')", pos) + report.error(em"The name of a JS global variable must be a valid JS identifier (got '$globalRef')", pos) } if (enclosingOwner is OwnerKind.JSNative) { @@ -585,7 +585,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP for (annot <- sym.annotations) { val annotSym = annot.symbol if (isJSNativeLoadingSpecAnnot(annotSym)) - report.error(i"Nested JS classes and objects cannot have an @${annotSym.name} annotation.", annot.tree) + report.error(em"Nested JS classes and objects cannot have an @${annotSym.name} annotation.", annot.tree) } if (sym.owner.isStaticOwner) { @@ -731,7 +731,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP if (overriddenSymbols.hasNext) { val overridden = overriddenSymbols.next() val verb = if (overridden.is(Deferred)) "implement" else "override" - report.error(i"An @js.native member cannot $verb the inherited member ${overridden.fullName}", tree) + report.error(em"An @js.native member cannot $verb the inherited member ${overridden.fullName}", tree) } tree @@ -888,6 +888,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error("A non-native JS trait cannot contain private members", tree) } else if (sym.is(Lazy)) { report.error("A non-native JS trait cannot contain lazy vals", tree) + } else if (sym.is(ParamAccessor)) { + // #12621 + report.error("A non-native JS trait cannot have constructor parameters", tree) } else if (!sym.is(Deferred)) { /* Tell the back-end not to emit this thing. In fact, this only * matters for mixed-in members created from this member. 
@@ -974,15 +977,17 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tree.rhs match { case sel: Select if sel.symbol == jsdefn.JSPackage_native => // ok + case rhs: Ident if rhs.symbol == jsdefn.JSPackage_native => + // ok case _ => val pos = if (tree.rhs != EmptyTree) tree.rhs.srcPos else tree.srcPos report.error(s"$longKindStr may only call js.native.", pos) } - // Check that the resul type was explicitly specified + // Check that the result type was explicitly specified // (This is stronger than Scala 2, which only warns, and only if it was inferred as Nothing.) - if (tree.tpt.span.isSynthetic) - report.error(i"The type of ${tree.name} must be explicitly specified because it is JS native.", tree) + if (tree.tpt.isInstanceOf[InferredTypeTree]) + report.error(em"The type of ${tree.name} must be explicitly specified because it is JS native.", tree) } private def checkJSNativeSpecificAnnotsOnNonJSNative(memberDef: MemberDef)(using Context): Unit = { @@ -1319,7 +1324,7 @@ object PrepJSInterop { for (annotation <- sym.annotations) { if (isCompilerAnnotation(annotation)) { report.error( - i"@${annotation.symbol.fullName} is for compiler internal use only. Do not use it yourself.", + em"@${annotation.symbol.fullName} is for compiler internal use only. 
Do not use it yourself.", annotation.tree) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index b4f3da25fc6c..79d6501ccb2d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -6,7 +6,6 @@ import core._ import ast.{Trees, tpd, untpd, desugar} import util.Stats.record import util.{SrcPos, NoSourcePosition} -import Trees.Untyped import Contexts._ import Flags._ import Symbols._ @@ -24,12 +23,13 @@ import Inferencing._ import reporting._ import transform.TypeUtils._ import transform.SymUtils._ -import Nullables._ +import Nullables._, NullOpsDecorator.* import config.Feature import collection.mutable import config.Printers.{overload, typr, unapp} import TypeApplications._ +import Annotations.Annotation import Constants.{Constant, IntTag} import Denotations.SingleDenotation @@ -46,7 +46,7 @@ object Applications { def extractorMemberType(tp: Type, name: Name, errorPos: SrcPos)(using Context): Type = { val ref = extractorMember(tp, name) if (ref.isOverloaded) - errorType(i"Overloaded reference to $ref is not allowed in extractor", errorPos) + errorType(em"Overloaded reference to $ref is not allowed in extractor", errorPos) ref.info.widenExpr.annotatedToRepeated } @@ -210,63 +210,82 @@ object Applications { def wrapDefs(defs: mutable.ListBuffer[Tree] | Null, tree: Tree)(using Context): Tree = if (defs != null && defs.nonEmpty) tpd.Block(defs.toList, tree) else tree + /** Optionally, if `sym` is a symbol created by `resolveMapped`, i.e. representing + * a mapped alternative, the original prefix of the alternative and the number of + * skipped term parameters. 
+ */ + private def mappedAltInfo(sym: Symbol)(using Context): Option[(Type, Int)] = + for ann <- sym.getAnnotation(defn.MappedAlternativeAnnot) yield + val AppliedType(_, pre :: ConstantType(c) :: Nil) = ann.tree.tpe: @unchecked + (pre, c.intValue) + /** Find reference to default parameter getter for parameter #n in current - * parameter list, or NoType if none was found - */ + * parameter list, or EmptyTree if none was found. + * @param fn the tree referring to the function part of this call + * @param n the index of the parameter in the parameter list of the call + * @param testOnly true iff we just to find out whether a getter exists + */ def findDefaultGetter(fn: Tree, n: Int, testOnly: Boolean)(using Context): Tree = - if fn.symbol.isTerm then + def reifyPrefix(pre: Type): Tree = pre match + case pre: SingletonType => singleton(pre, needLoad = !testOnly) + case pre if testOnly => + // In this case it is safe to skolemize now; we will produce a stable prefix for the actual call. + ref(pre.narrow) + case _ => EmptyTree + + if fn.symbol.hasDefaultParams then val meth = fn.symbol.asTerm - val receiver: Tree = methPart(fn) match { - case Select(receiver, _) => receiver - case mr => mr.tpe.normalizedPrefix match { - case mr: TermRef => ref(mr) - case mr: ThisType => singleton(mr) - case mr => - if testOnly then - // In this case it is safe to skolemize now; we will produce a stable prefix for the actual call. 
- ref(mr.narrow) - else - EmptyTree - } - } - val getterPrefix = - if (meth.is(Synthetic) && meth.name == nme.apply) nme.CONSTRUCTOR else meth.name - def getterName = DefaultGetterName(getterPrefix, n + numArgs(fn)) - if !meth.hasDefaultParams then - EmptyTree - else if (receiver.isEmpty) { - def findGetter(cx: Context): Tree = - if (cx eq NoContext) EmptyTree - else if (cx.scope != cx.outer.scope && - cx.denotNamed(meth.name).hasAltWith(_.symbol == meth)) { - val denot = cx.denotNamed(getterName) - if (denot.exists) ref(TermRef(cx.owner.thisType, getterName, denot)) - else findGetter(cx.outer) - } + val idx = n + numArgs(fn) + methPart(fn) match + case Select(receiver, _) => + findDefaultGetter(meth, receiver, idx) + case mr => mappedAltInfo(meth) match + case Some((pre, skipped)) => + findDefaultGetter(meth, reifyPrefix(pre), idx + skipped) + case None => + findDefaultGetter(meth, reifyPrefix(mr.tpe.normalizedPrefix), idx) + else EmptyTree // structural applies don't have symbols or defaults + end findDefaultGetter + + /** Find reference to default parameter getter for method `meth` numbered `idx` + * selected from given `receiver`, or EmptyTree if none was found. 
+ * @param meth the called method (can be mapped by resolveMapped) + * @param receiver the receiver of the original method call, which determines + * where default getters are found + * @param idx the index of the searched for default getter, as encoded in its name + */ + def findDefaultGetter(meth: TermSymbol, receiver: Tree, idx: Int)(using Context): Tree = + val getterPrefix = + if (meth.is(Synthetic) && meth.name == nme.apply) nme.CONSTRUCTOR else meth.name + val getterName = DefaultGetterName(getterPrefix, idx) + + if receiver.isEmpty then + def findGetter(cx: Context): Tree = + if cx eq NoContext then EmptyTree + else if cx.scope != cx.outer.scope + && cx.denotNamed(meth.name).hasAltWith(_.symbol == meth) then + val denot = cx.denotNamed(getterName) + if denot.exists then ref(TermRef(cx.owner.thisType, getterName, denot)) else findGetter(cx.outer) - findGetter(ctx) - } - else { - def selectGetter(qual: Tree): Tree = { - val getterDenot = qual.tpe.member(getterName) - if (getterDenot.exists) qual.select(TermRef(qual.tpe, getterName, getterDenot)) - else EmptyTree - } - if (!meth.isClassConstructor) - selectGetter(receiver) - else { - // default getters for class constructors are found in the companion object - val cls = meth.owner - val companion = cls.companionModule - if (companion.isTerm) { - val prefix = receiver.tpe.baseType(cls).normalizedPrefix - if (prefix.exists) selectGetter(ref(TermRef(prefix, companion.asTerm))) - else EmptyTree - } + else findGetter(cx.outer) + findGetter(ctx) + else + def selectGetter(qual: Tree): Tree = + val getterDenot = qual.tpe.member(getterName) + .accessibleFrom(qual.tpe.widenIfUnstable, superAccess = true) // to reset Local + if (getterDenot.exists) qual.select(TermRef(qual.tpe, getterName, getterDenot)) + else EmptyTree + if !meth.isClassConstructor then + selectGetter(receiver) + else + // default getters for class constructors are found in the companion object + val cls = meth.owner + val companion = 
cls.companionModule + if companion.isTerm then + val prefix = receiver.tpe.baseType(cls).normalizedPrefix + if prefix.exists then selectGetter(ref(TermRef(prefix, companion.asTerm))) else EmptyTree - } - } - else EmptyTree // structural applies don't have symbols or defaults + else EmptyTree end findDefaultGetter /** Splice new method reference `meth` into existing application `app` */ @@ -322,6 +341,12 @@ object Applications { val getter = findDefaultGetter(fn, n, testOnly) if getter.isEmpty then getter else spliceMeth(getter.withSpan(fn.span), fn) + + def retypeSignaturePolymorphicFn(fun: Tree, methType: Type)(using Context): Tree = + val sym1 = fun.symbol + val flags2 = sym1.flags | NonMember // ensures Select typing doesn't let TermRef#withPrefix revert the type + val sym2 = sym1.copy(info = methType, flags = flags2) // symbol not entered, to avoid overload resolution problems + fun.withType(sym2.termRef) } trait Applications extends Compatibility { @@ -419,10 +444,17 @@ trait Applications extends Compatibility { /** The function's type after widening and instantiating polytypes * with TypeParamRefs in constraint set */ - @threadUnsafe lazy val methType: Type = liftedFunType.widen match { - case funType: MethodType => funType - case funType: PolyType => instantiateWithTypeVars(funType) - case tp => tp //was: funType + @threadUnsafe lazy val methType: Type = { + def rec(t: Type): Type = { + t.widen match{ + case funType: MethodType => funType + case funType: PolyType => + rec(instantiateWithTypeVars(funType)) + case tp => tp + } + } + + rec(liftedFunType) } @threadUnsafe lazy val liftedFunType: Type = @@ -460,7 +492,7 @@ trait Applications extends Compatibility { matchArgs(orderedArgs, methType.paramInfos, 0) case _ => if (methType.isError) ok = false - else fail(s"$methString does not take parameters") + else fail(em"$methString does not take parameters") } /** The application was successful */ @@ -472,7 +504,7 @@ trait Applications extends Compatibility { 
i"${err.refStr(methRef)}$infoStr" /** Re-order arguments to correctly align named arguments */ - def reorder[T >: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = { + def reorder[T <: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = { /** @param pnames The list of parameter names that are missing arguments * @param args The list of arguments that are not yet passed, or that are waiting to be dropped @@ -500,9 +532,9 @@ trait Applications extends Compatibility { else { // name not (or no longer) available for named arg def msg = if (methodType.paramNames contains aname) - s"parameter $aname of $methString is already instantiated" + em"parameter $aname of $methString is already instantiated" else - s"$methString does not have a parameter $aname" + em"$methString does not have a parameter $aname" fail(msg, arg.asInstanceOf[Arg]) arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop) } @@ -529,7 +561,7 @@ trait Applications extends Compatibility { /** Is `sym` a constructor of a Java-defined annotation? */ def isJavaAnnotConstr(sym: Symbol): Boolean = - sym.is(JavaDefined) && sym.isConstructor && sym.owner.derivesFrom(defn.AnnotationClass) + sym.is(JavaDefined) && sym.isConstructor && sym.owner.is(JavaAnnotation) /** Match re-ordered arguments against formal parameters * @param n The position of the first parameter in formals in `methType`. 
@@ -561,15 +593,12 @@ trait Applications extends Compatibility { else formals1 - def missingArg(n: Int): Unit = { - val pname = methodType.paramNames(n) - fail( - if (pname.firstPart contains '$') s"not enough arguments for $methString" - else s"missing argument for parameter $pname of $methString") - } + def missingArg(n: Int): Unit = + fail(MissingArgument(methodType.paramNames(n), methString)) def tryDefault(n: Int, args1: List[Arg]): Unit = { val sym = methRef.symbol + val testOnly = this.isInstanceOf[TestApplication[?]] val defaultArg = if (isJavaAnnotConstr(sym)) { @@ -585,12 +614,14 @@ trait Applications extends Compatibility { else EmptyTree } - else defaultArgument(normalizedFun, n, this.isInstanceOf[TestApplication[?]]) + else defaultArgument(normalizedFun, n, testOnly) def implicitArg = implicitArgTree(formal, appPos.span) if !defaultArg.isEmpty then - matchArgs(args1, addTyped(treeToArg(defaultArg)), n + 1) + defaultArg.tpe.widen match + case _: MethodOrPoly if testOnly => matchArgs(args1, formals1, n + 1) + case _ => matchArgs(args1, addTyped(treeToArg(defaultArg)), n + 1) else if methodType.isContextualMethod && ctx.mode.is(Mode.ImplicitsEnabled) then matchArgs(args1, addTyped(treeToArg(implicitArg)), n + 1) else @@ -630,9 +661,9 @@ trait Applications extends Compatibility { def msg = arg match case untpd.Tuple(Nil) if applyKind == ApplyKind.InfixTuple && funType.widen.isNullaryMethod => - i"can't supply unit value with infix notation because nullary $methString takes no arguments; use dotted invocation instead: (...).${methRef.name}()" + em"can't supply unit value with infix notation because nullary $methString takes no arguments; use dotted invocation instead: (...).${methRef.name}()" case _ => - i"too many arguments for $methString" + em"too many arguments for $methString" fail(msg, arg) case nil => } @@ -690,8 +721,8 @@ trait Applications extends Compatibility { || argMatch == ArgMatch.CompatibleCAP && { val argtpe1 = argtpe.widen - val captured = 
captureWildcards(argtpe1) - (captured ne argtpe1) && isCompatible(captured, formal.widenExpr) + val captured = captureWildcardsCompat(argtpe1, formal.widenExpr) + captured ne argtpe1 } /** The type of the given argument */ @@ -736,7 +767,7 @@ trait Applications extends Compatibility { /** Subclass of Application for type checking an Apply node, where * types of arguments are either known or unknown. */ - abstract class TypedApply[T >: Untyped]( + abstract class TypedApply[T <: Untyped]( app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Trees.Tree[T]], resultType: Type, override val applyKind: ApplyKind)(using Context) extends Application(methRef, fun.tpe, args, resultType) { @@ -919,6 +950,21 @@ trait Applications extends Compatibility { /** Type application where arguments come from prototype, and no implicits are inserted */ def simpleApply(fun1: Tree, proto: FunProto)(using Context): Tree = methPart(fun1).tpe match { + case funRef: TermRef if funRef.symbol.isSignaturePolymorphic => + // synthesize a method type based on the types at the call site. 
+ // one can imagine the original signature-polymorphic method as + // being infinitely overloaded, with each individual overload only + // being brought into existence as needed + val originalResultType = funRef.symbol.info.resultType.stripNull + val resultType = + if !originalResultType.isRef(defn.ObjectClass) then originalResultType + else AvoidWildcardsMap()(proto.resultType.deepenProtoTrans) match + case SelectionProto(nme.asInstanceOf_, PolyProto(_, resTp), _, _) => resTp + case resTp if isFullyDefined(resTp, ForceDegree.all) => resTp + case _ => defn.ObjectType + val methType = MethodType(proto.typedArgs().map(_.tpe.widen), resultType) + val fun2 = Applications.retypeSignaturePolymorphicFn(fun1, methType) + simpleApply(fun2, proto) case funRef: TermRef => val app = ApplyTo(tree, fun1, funRef, proto, pt) convertNewGenericArray( @@ -964,7 +1010,10 @@ trait Applications extends Compatibility { case TypeApply(fun, _) => !fun.isInstanceOf[Select] case _ => false } - typedDynamicApply(tree, isInsertedApply, pt) + val tree1 = fun1 match + case Select(_, nme.apply) => tree + case _ => untpd.Apply(fun1, tree.args) + typedDynamicApply(tree1, isInsertedApply, pt) case _ => if (originalProto.isDropped) fun1 else if (fun1.symbol == defn.Compiletime_summonFrom) @@ -1079,7 +1128,7 @@ trait Applications extends Compatibility { /** Overridden in ReTyper to handle primitive operations that can be generated after erasure */ protected def handleUnexpectedFunType(tree: untpd.Apply, fun: Tree)(using Context): Tree = if ctx.reporter.errorsReported then - throw TypeError(i"unexpected function type: ${methPart(fun).tpe}") + throw TypeError(em"unexpected function type: ${methPart(fun).tpe}") else throw Error(i"unexpected type.\n fun = $fun,\n methPart(fun) = ${methPart(fun)},\n methPart(fun).tpe = ${methPart(fun).tpe},\n tpe = ${fun.tpe}") @@ -1087,8 +1136,8 @@ trait Applications extends Compatibility { for (case arg @ NamedArg(id, argtpt) <- args) yield { if 
!Feature.namedTypeArgsEnabled then report.error( - i"""Named type arguments are experimental, - |they must be enabled with a `experimental.namedTypeArguments` language import or setting""", + em"""Named type arguments are experimental, + |they must be enabled with a `experimental.namedTypeArguments` language import or setting""", arg.srcPos) val argtpt1 = typedType(argtpt) cpy.NamedArg(arg)(id, argtpt1).withType(argtpt1.tpe) @@ -1096,14 +1145,18 @@ trait Applications extends Compatibility { def typedTypeApply(tree: untpd.TypeApply, pt: Type)(using Context): Tree = { if (ctx.mode.is(Mode.Pattern)) - return errorTree(tree, "invalid pattern") + return errorTree(tree, em"invalid pattern") val isNamed = hasNamedArg(tree.args) val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) record("typedTypeApply") typedExpr(tree.fun, PolyProto(typedArgs, pt)) match { - case _: TypeApply if !ctx.isAfterTyper => - errorTree(tree, "illegal repeated type application") + case fun: TypeApply if !ctx.isAfterTyper => + val function = fun.fun + val args = (fun.args ++ tree.args).map(_.show).mkString(", ") + errorTree(tree, em"""illegal repeated type application + |You might have meant something like: + |${function}[${args}]""") case typedFn => typedFn.tpe.widen match { case pt: PolyType => @@ -1216,8 +1269,6 @@ trait Applications extends Compatibility { def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = { record("typedUnApply") val Apply(qual, args) = tree - if !ctx.mode.is(Mode.InTypeTest) then - checkMatchable(selType, tree.srcPos, pattern = true) def notAnExtractor(tree: Tree): Tree = // prefer inner errors @@ -1356,12 +1407,13 @@ trait Applications extends Compatibility { val unapplyArgType = mt.paramInfos.head unapp.println(i"unapp arg tpe = $unapplyArgType, pt = $selType") val ownType = - if (selType <:< unapplyArgType) { + if selType <:< unapplyArgType then unapp.println(i"case 1 $unapplyArgType 
${ctx.typerState.constraint}") fullyDefinedType(unapplyArgType, "pattern selector", tree.srcPos) selType.dropAnnot(defn.UncheckedAnnot) // need to drop @unchecked. Just because the selector is @unchecked, the pattern isn't. - } - else { + else + if !ctx.mode.is(Mode.InTypeTest) then + checkMatchable(selType, tree.srcPos, pattern = true) // We ignore whether constraining the pattern succeeded. // Constraining only fails if the pattern cannot possibly match, // but useless pattern checks detect more such cases, so we simply rely on them instead. @@ -1370,7 +1422,7 @@ trait Applications extends Compatibility { if (patternBound.nonEmpty) unapplyFn = addBinders(unapplyFn, patternBound) unapp.println(i"case 2 $unapplyArgType ${ctx.typerState.constraint}") unapplyArgType - } + val dummyArg = dummyTreeOfType(ownType) val unapplyApp = typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil))) def unapplyImplicits(unapp: Tree): List[Tree] = { @@ -1379,7 +1431,7 @@ trait Applications extends Compatibility { case Apply(Apply(unapply, `dummyArg` :: Nil), args2) => assert(args2.nonEmpty); res ++= args2 case Apply(unapply, `dummyArg` :: Nil) => case Inlined(u, _, _) => loop(u) - case DynamicUnapply(_) => report.error("Structural unapply is not supported", unapplyFn.srcPos) + case DynamicUnapply(_) => report.error(em"Structural unapply is not supported", unapplyFn.srcPos) case Apply(fn, args) => assert(args.nonEmpty); loop(fn); res ++= args case _ => ().assertingErrorsReported } @@ -1484,11 +1536,17 @@ trait Applications extends Compatibility { } /** Drop any leading implicit parameter sections */ - def stripImplicit(tp: Type)(using Context): Type = tp match { + def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { case mt: MethodType if mt.isImplicitMethod => - stripImplicit(resultTypeApprox(mt)) + stripImplicit(resultTypeApprox(mt, wildcardOnly)) case pt: PolyType => - pt.derivedLambdaType(pt.paramNames, pt.paramInfos, 
stripImplicit(pt.resultType)).asInstanceOf[PolyType].flatten + pt.derivedLambdaType(pt.paramNames, pt.paramInfos, + stripImplicit(pt.resultType, wildcardOnly = true)) + // can't use TypeParamRefs for parameter references in `resultTypeApprox` + // since their bounds can refer to type parameters in `pt` that are not + // bound by the constraint. This can lead to hygiene violations if subsequently + // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. + .asInstanceOf[PolyType].flatten case _ => tp } @@ -1879,7 +1937,9 @@ trait Applications extends Compatibility { /** The shape of given tree as a type; cannot handle named arguments. */ def typeShape(tree: untpd.Tree): Type = tree match { case untpd.Function(args, body) => - defn.FunctionOf(args map Function.const(defn.AnyType), typeShape(body)) + defn.FunctionOf( + args.map(Function.const(defn.AnyType)), typeShape(body), + isContextual = untpd.isContextualClosure(tree)) case Match(EmptyTree, _) => defn.PartialFunctionClass.typeRef.appliedTo(defn.AnyType :: defn.NothingType :: Nil) case _ => @@ -1918,7 +1978,7 @@ trait Applications extends Compatibility { val formals = ref.widen.firstParamTypes if formals.length > idx then formals(idx) match - case defn.FunctionOf(args, _, _, _) => args.length + case defn.FunctionOf(args, _, _) => args.length case _ => -1 else -1 @@ -1947,9 +2007,8 @@ trait Applications extends Compatibility { def isVarArgs = ptypes.nonEmpty && ptypes.last.isRepeatedParam def numDefaultParams = if alt.symbol.hasDefaultParams then - trimParamss(tp, alt.symbol.rawParamss) match - case params :: _ => params.count(_.is(HasDefault)) - case _ => 0 + val fn = ref(alt, needLoad = false) + ptypes.indices.count(n => !findDefaultGetter(fn, n, testOnly = true).isEmpty) else 0 if numParams < numArgs then isVarArgs else if numParams == numArgs then true @@ -2003,7 +2062,7 @@ trait Applications extends Compatibility { if isDetermined(alts2) then alts2 else resolveMapped(alts1, 
_.widen.appliedTo(targs1.tpes), pt1) - case defn.FunctionOf(args, resultType, _, _) => + case defn.FunctionOf(args, resultType, _) => narrowByTypes(alts, args, resultType) case pt => @@ -2098,13 +2157,22 @@ trait Applications extends Compatibility { } end resolveOverloaded1 - /** The largest suffix of `paramss` that has the same first parameter name as `t` */ - def trimParamss(t: Type, paramss: List[List[Symbol]])(using Context): List[List[Symbol]] = t match + /** The largest suffix of `paramss` that has the same first parameter name as `t`, + * plus the number of term parameters in `paramss` that come before that suffix. + */ + def trimParamss(t: Type, paramss: List[List[Symbol]])(using Context): (List[List[Symbol]], Int) = t match case MethodType(Nil) => trimParamss(t.resultType, paramss) case t: MethodOrPoly => val firstParamName = t.paramNames.head - paramss.dropWhile(_.head.name != firstParamName) - case _ => Nil + def recur(pss: List[List[Symbol]], skipped: Int): (List[List[Symbol]], Int) = + (pss: @unchecked) match + case (ps @ (p :: _)) :: pss1 => + if p.name == firstParamName then (pss, skipped) + else recur(pss1, if p.name.isTermName then skipped + ps.length else skipped) + case Nil => + (pss, skipped) + recur(paramss, 0) + case _ => (Nil, 0) /** Resolve overloading by mapping to a different problem where each alternative's * type is mapped with `f`, alternatives with non-existing types are dropped, and the @@ -2114,8 +2182,19 @@ trait Applications extends Compatibility { val reverseMapping = alts.flatMap { alt => val t = f(alt) if t.exists then + val (trimmed, skipped) = trimParamss(t.stripPoly, alt.symbol.rawParamss) val mappedSym = alt.symbol.asTerm.copy(info = t) - mappedSym.rawParamss = trimParamss(t, alt.symbol.rawParamss) + mappedSym.rawParamss = trimmed + val (pre, totalSkipped) = mappedAltInfo(alt.symbol) match + case Some((pre, prevSkipped)) => + mappedSym.removeAnnotation(defn.MappedAlternativeAnnot) + (pre, skipped + prevSkipped) + case None 
=> + (alt.prefix, skipped) + mappedSym.addAnnotation( + Annotation(TypeTree( + defn.MappedAlternativeAnnot.typeRef.appliedTo( + pre, ConstantType(Constant(totalSkipped)))))) Some((TermRef(NoPrefix, mappedSym), alt)) else None @@ -2146,7 +2225,7 @@ trait Applications extends Compatibility { val formalsForArg: List[Type] = altFormals.map(_.head) def argTypesOfFormal(formal: Type): List[Type] = formal.dealias match { - case defn.FunctionOf(args, result, isImplicit, isErased) => args + case defn.FunctionOf(args, result, isImplicit) => args case defn.PartialFunctionOf(arg, result) => arg :: Nil case _ => Nil } @@ -2169,7 +2248,7 @@ trait Applications extends Compatibility { false val commonFormal = if (isPartial) defn.PartialFunctionOf(commonParamTypes.head, WildcardType) - else defn.FunctionOf(commonParamTypes, WildcardType) + else defn.FunctionOf(commonParamTypes, WildcardType, isContextual = untpd.isContextualClosure(arg)) overload.println(i"pretype arg $arg with expected type $commonFormal") if (commonParamTypes.forall(isFullyDefined(_, ForceDegree.flipBottom))) withMode(Mode.ImplicitsEnabled) { @@ -2338,9 +2417,14 @@ trait Applications extends Compatibility { else None catch - case NonFatal(_) => None + case ex: UnhandledError => None def isApplicableExtensionMethod(methodRef: TermRef, receiverType: Type)(using Context): Boolean = methodRef.symbol.is(ExtensionMethod) && !receiverType.isBottomType && tryApplyingExtensionMethod(methodRef, nullLiteral.asInstance(receiverType)).nonEmpty + + def captureWildcardsCompat(tp: Type, pt: Type)(using Context): Type = + val captured = captureWildcards(tp) + if (captured ne tp) && isCompatible(captured, pt) then captured + else tp } diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 27d02f4cc0bf..bff9310dee88 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -33,7 +33,7 @@ import 
NameOps._ import SymDenotations.{NoCompleter, NoDenotation} import Applications.unapplyArgs import Inferencing.isFullyDefined -import transform.patmat.SpaceEngine.isIrrefutable +import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotedPattern} import config.Feature import config.Feature.sourceVersion import config.SourceVersion._ @@ -67,11 +67,12 @@ object Checking { */ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type = NoType, tpt: Tree = EmptyTree)(using Context): Unit = - args.lazyZip(boundss).foreach { (arg, bound) => - if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then - errorTree(arg, - showInferred(MissingTypeParameterInTypeApp(arg.tpe), app, tpt)) - } + if ctx.phase != Phases.checkCapturesPhase then + args.lazyZip(boundss).foreach { (arg, bound) => + if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then + errorTree(arg, + showInferred(MissingTypeParameterInTypeApp(arg.tpe), app, tpt)) + } for (arg, which, bound) <- TypeOps.boundsViolations(args, boundss, instantiate, app) do report.error( showInferred(DoesNotConformToBound(arg.tpe, which, bound), app, tpt), @@ -154,7 +155,7 @@ object Checking { checker.traverse(tpt.tpe) def checkNoWildcard(tree: Tree)(using Context): Tree = tree.tpe match { - case tpe: TypeBounds => errorTree(tree, "no wildcard type allowed here") + case tpe: TypeBounds => errorTree(tree, em"no wildcard type allowed here") case _ => tree } @@ -184,12 +185,14 @@ object Checking { /** Check that `tp` refers to a nonAbstract class * and that the instance conforms to the self type of the created class. 
*/ - def checkInstantiable(tp: Type, pos: SrcPos)(using Context): Unit = + def checkInstantiable(tp: Type, srcTp: Type, pos: SrcPos)(using Context): Unit = tp.underlyingClassRef(refinementOK = false) match case tref: TypeRef => val cls = tref.symbol - if (cls.isOneOf(AbstractOrTrait)) - report.error(CantInstantiateAbstractClassOrTrait(cls, isTrait = cls.is(Trait)), pos) + if (cls.isOneOf(AbstractOrTrait)) { + val srcCls = srcTp.underlyingClassRef(refinementOK = false).typeSymbol + report.error(CantInstantiateAbstractClassOrTrait(srcCls, isTrait = srcCls.is(Trait)), pos) + } if !cls.is(Module) then // Create a synthetic singleton type instance, and check whether // it conforms to the self type of the class as seen from that instance. @@ -471,10 +474,11 @@ object Checking { def checkWithDeferred(flag: FlagSet) = if (sym.isOneOf(flag)) fail(AbstractMemberMayNotHaveModifier(sym, flag)) - def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: => String) = + def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: Message) = if (sym.isAllOf(flag1 | flag2)) fail(msg) def checkCombination(flag1: FlagSet, flag2: FlagSet) = - if sym.isAllOf(flag1 | flag2) then fail(i"illegal combination of modifiers: `${flag1.flagsString}` and `${flag2.flagsString}` for: $sym") + if sym.isAllOf(flag1 | flag2) then + fail(em"illegal combination of modifiers: `${flag1.flagsString}` and `${flag2.flagsString}` for: $sym") def checkApplicable(flag: Flag, ok: Boolean) = if sym.is(flag, butNot = Synthetic) && !ok then fail(ModifierNotAllowedForDefinition(flag)) @@ -494,7 +498,7 @@ object Checking { } if sym.is(Transparent) then if sym.isType then - if !sym.is(Trait) then fail(em"`transparent` can only be used for traits") + if !sym.isExtensibleClass then fail(em"`transparent` can only be used for extensible classes and traits") else if !sym.isInlineMethod then fail(em"`transparent` can only be used for inline methods") if (!sym.isClass && sym.is(Abstract)) @@ -519,7 +523,7 @@ object Checking { if 
!sym.isOneOf(Method | ModuleVal) then fail(TailrecNotApplicable(sym)) else if sym.is(Inline) then - fail("Inline methods cannot be @tailrec") + fail(em"Inline methods cannot be @tailrec") if sym.hasAnnotation(defn.TargetNameAnnot) && sym.isClass && sym.isTopLevelClass then fail(TargetNameOnTopLevelClass(sym)) if (sym.hasAnnotation(defn.NativeAnnot)) { @@ -538,7 +542,7 @@ object Checking { fail(CannotExtendAnyVal(sym)) if (sym.isConstructor && !sym.isPrimaryConstructor && sym.owner.is(Trait, butNot = JavaDefined)) val addendum = if ctx.settings.Ydebug.value then s" ${sym.owner.flagsString}" else "" - fail("Traits cannot have secondary constructors" + addendum) + fail(em"Traits cannot have secondary constructors$addendum") checkApplicable(Inline, sym.isTerm && !sym.isOneOf(Mutable | Module)) checkApplicable(Lazy, !sym.isOneOf(Method | Mutable)) if (sym.isType && !sym.isOneOf(Deferred | JavaDefined)) @@ -559,7 +563,7 @@ object Checking { // The issue with `erased inline` is that the erased semantics get lost // as the code is inlined and the reference is removed before the erased usage check. checkCombination(Erased, Inline) - checkNoConflict(Lazy, ParamAccessor, s"parameter may not be `lazy`") + checkNoConflict(Lazy, ParamAccessor, em"parameter may not be `lazy`") } /** Check for illegal or redundant modifiers on modules. 
This is done separately @@ -598,7 +602,7 @@ object Checking { */ def checkNoPrivateLeaks(sym: Symbol)(using Context): Type = { class NotPrivate extends TypeMap { - var errors: List[() => String] = Nil + var errors: List[Message] = Nil private var inCaptureSet: Boolean = false def accessBoundary(sym: Symbol): Symbol = @@ -630,7 +634,7 @@ object Checking { var tp1 = if (isLeaked(tp.symbol)) { errors = - (() => em"non-private ${sym.showLocated} refers to private ${tp.symbol}\nin its type signature ${sym.info}") + em"non-private ${sym.showLocated} refers to private ${tp.symbol}\nin its type signature ${sym.info}" :: errors tp } @@ -671,7 +675,7 @@ object Checking { } val notPrivate = new NotPrivate val info = notPrivate(sym.info) - notPrivate.errors.foreach(error => report.errorOrMigrationWarning(error(), sym.srcPos, from = `3.0`)) + notPrivate.errors.foreach(report.errorOrMigrationWarning(_, sym.srcPos, from = `3.0`)) info } @@ -806,13 +810,13 @@ trait Checking { /** Check that type `tp` is stable. */ def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = - if !tp.isStable then report.error(NotAPath(tp, kind), pos) + if !tp.isStable && !tp.isErroneous then report.error(NotAPath(tp, kind), pos) /** Check that all type members of `tp` have realizable bounds */ def checkRealizableBounds(cls: Symbol, pos: SrcPos)(using Context): Unit = { val rstatus = boundsRealizability(cls.thisType) if (rstatus ne Realizable) - report.error(ex"$cls cannot be instantiated since it${rstatus.msg}", pos) + report.error(em"$cls cannot be instantiated since it${rstatus.msg}", pos) } /** Check that pattern `pat` is irrefutable for scrutinee type `sel.tpe`. 
@@ -833,7 +837,7 @@ trait Checking { var reportedPt = pt.dropAnnot(defn.UncheckedAnnot) if !pat.tpe.isSingleton then reportedPt = reportedPt.widen val problem = if pat.tpe <:< reportedPt then "is more specialized than" else "does not match" - ex"pattern's type ${pat.tpe} $problem the right hand side expression's type $reportedPt" + em"pattern's type ${pat.tpe} $problem the right hand side expression's type $reportedPt" case RefutableExtractor => val extractor = val UnApply(fn, _, _) = pat: @unchecked @@ -842,6 +846,10 @@ trait Checking { case _ => EmptyTree if extractor.isEmpty then em"pattern binding uses refutable extractor" + else if extractor.symbol eq defn.QuoteMatching_ExprMatch then + em"pattern binding uses refutable extractor `'{...}`" + else if extractor.symbol eq defn.QuoteMatching_TypeMatch then + em"pattern binding uses refutable extractor `'[...]`" else em"pattern binding uses refutable extractor `$extractor`" @@ -861,10 +869,11 @@ trait Checking { else pat.srcPos def rewriteMsg = Message.rewriteNotice("This patch", `3.2-migration`) report.gradualErrorOrMigrationWarning( - em"""$message - | - |If $usage is intentional, this can be communicated by $fix, - |which $addendum.$rewriteMsg""", + message.append( + i"""| + | + |If $usage is intentional, this can be communicated by $fix, + |which $addendum.$rewriteMsg"""), pos, warnFrom = `3.2`, errorFrom = `future`) false } @@ -879,9 +888,9 @@ trait Checking { pat match case Bind(_, pat1) => recur(pat1, pt) - case UnApply(fn, _, pats) => + case UnApply(fn, implicits, pats) => check(pat, pt) && - (isIrrefutable(fn, pats.length) || fail(pat, pt, Reason.RefutableExtractor)) && { + (isIrrefutable(fn, pats.length) || isIrrefutableQuotedPattern(fn, implicits, pt) || fail(pat, pt, Reason.RefutableExtractor)) && { val argPts = unapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos) pats.corresponds(argPts)(recur) } @@ -901,7 +910,7 @@ trait Checking { private def checkLegalImportOrExportPath(path: Tree, kind: 
String)(using Context): Unit = { checkStable(path.tpe, path.srcPos, kind) if (!ctx.isAfterTyper) Checking.checkRealizable(path.tpe, path.srcPos) - if !isIdempotentExpr(path) then + if !isIdempotentExpr(path) && !path.tpe.isErroneous then report.error(em"import prefix is not a pure expression", path.srcPos) } @@ -933,8 +942,8 @@ trait Checking { // we restrict wildcard export from package as incremental compilation does not yet // register a dependency on "all members of a package" - see https://github.com/sbt/zinc/issues/226 report.error( - em"Implementation restriction: ${path.tpe.classSymbol} is not a valid prefix " + - "for a wildcard export, as it is a package.", path.srcPos) + em"Implementation restriction: ${path.tpe.classSymbol} is not a valid prefix for a wildcard export, as it is a package", + path.srcPos) /** Check that module `sym` does not clash with a class of the same name * that is concurrently compiled in another source file. @@ -977,14 +986,15 @@ trait Checking { sym.srcPos) /** If `tree` is an application of a new-style implicit conversion (using the apply - * method of a `scala.Conversion` instance), check that implicit conversions are - * enabled. + * method of a `scala.Conversion` instance), check that the expected type is + * a convertible formal parameter type or that implicit conversions are enabled. 
*/ - def checkImplicitConversionUseOK(tree: Tree)(using Context): Unit = + def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = val sym = tree.symbol if sym.name == nme.apply && sym.owner.derivesFrom(defn.ConversionClass) && !sym.info.isErroneous + && !expected.isConvertibleParam then def conv = methPart(tree) match case Select(qual, _) => qual.symbol.orElse(sym.owner) @@ -1020,8 +1030,8 @@ trait Checking { ("method", (n: Name) => s"method syntax .$n(...)") def rewriteMsg = Message.rewriteNotice("The latter", options = "-deprecation") report.deprecationWarning( - i"""Alphanumeric $kind $name is not declared ${hlAsKeyword("infix")}; it should not be used as infix operator. - |Instead, use ${alternative(name)} or backticked identifier `$name`.$rewriteMsg""", + em"""Alphanumeric $kind $name is not declared ${hlAsKeyword("infix")}; it should not be used as infix operator. + |Instead, use ${alternative(name)} or backticked identifier `$name`.$rewriteMsg""", tree.op.srcPos) if (ctx.settings.deprecation.value) { patch(Span(tree.op.span.start, tree.op.span.start), "`") @@ -1047,14 +1057,14 @@ trait Checking { def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = { def checkGoodBounds(tp: Type) = tp match { case tp @ TypeBounds(lo, hi) if !(lo <:< hi) => - report.error(ex"no type exists between low bound $lo and high bound $hi$where", pos) + report.error(em"no type exists between low bound $lo and high bound $hi$where", pos) TypeBounds(hi, hi) case _ => tp } tp match { case tp @ AndType(tp1, tp2) => - report.error(s"conflicting type arguments$where", pos) + report.error(em"conflicting type arguments$where", pos) tp1 case tp @ AppliedType(tycon, args) => tp.derivedAppliedType(tycon, args.mapConserve(checkGoodBounds)) @@ -1108,10 +1118,12 @@ trait Checking { def checkParentCall(call: Tree, caller: ClassSymbol)(using Context): Unit = if (!ctx.isAfterTyper) { val called = call.tpe.classSymbol + if 
(called.is(JavaAnnotation)) + report.error(em"${called.name} must appear without any argument to be a valid class parent because it is a Java annotation", call.srcPos) if (caller.is(Trait)) - report.error(i"$caller may not call constructor of $called", call.srcPos) + report.error(em"$caller may not call constructor of $called", call.srcPos) else if (called.is(Trait) && !caller.mixins.contains(called)) - report.error(i"""$called is already implemented by super${caller.superClass}, + report.error(em"""$called is already implemented by super${caller.superClass}, |its constructor cannot be called again""", call.srcPos) // Check that constructor call is of the form _.(args1)...(argsN). @@ -1120,7 +1132,7 @@ trait Checking { case Apply(fn, _) => checkLegalConstructorCall(fn, tree, "") case TypeApply(fn, _) => checkLegalConstructorCall(fn, tree, "type ") case Select(_, nme.CONSTRUCTOR) => // ok - case _ => report.error(s"too many ${kind}arguments in parent constructor", encl.srcPos) + case _ => report.error(em"too many ${kind}arguments in parent constructor", encl.srcPos) } call match { case Apply(fn, _) => checkLegalConstructorCall(fn, call, "") @@ -1170,7 +1182,7 @@ trait Checking { parent match { case parent: ClassSymbol => if (parent.is(Case)) - report.error(ex"""case $caseCls has case ancestor $parent, but case-to-case inheritance is prohibited. + report.error(em"""case $caseCls has case ancestor $parent, but case-to-case inheritance is prohibited. 
|To overcome this limitation, use extractors to pattern match on non-leaf nodes.""", pos) else checkCaseInheritance(parent.superClass, caseCls, pos) case _ => @@ -1184,7 +1196,7 @@ trait Checking { val check = new TreeTraverser { def traverse(tree: Tree)(using Context) = tree match { case id: Ident if vparams.exists(_.symbol == id.symbol) => - report.error("illegal forward reference to method parameter", id.srcPos) + report.error(em"illegal forward reference to method parameter", id.srcPos) case _ => traverseChildren(tree) } @@ -1227,7 +1239,7 @@ trait Checking { if (t.span.isSourceDerived && owner == badOwner) t match { case t: RefTree if allowed(t.name, checkedSym) => - case _ => report.error(i"illegal reference to $checkedSym from $where", t.srcPos) + case _ => report.error(em"illegal reference to $checkedSym from $where", t.srcPos) } val sym = t.symbol t match { @@ -1261,6 +1273,23 @@ trait Checking { if !Inlines.inInlineMethod && !ctx.isInlineContext then report.error(em"$what can only be used in an inline method", pos) + /** Check that the class corresponding to this tree is either a Scala or Java annotation. + * + * @return The original tree or an error tree in case `tree` isn't a valid + * annotation or already an error tree. 
+ */ + def checkAnnotClass(tree: Tree)(using Context): Tree = + if tree.tpe.isError then + return tree + val cls = Annotations.annotClass(tree) + if cls.is(JavaDefined) then + if !cls.is(JavaAnnotation) then + errorTree(tree, em"$cls is not a valid Java annotation: it was not declared with `@interface`") + else tree + else if !cls.derivesFrom(defn.AnnotationClass) then + errorTree(tree, em"$cls is not a valid Scala annotation: it does not extend `scala.annotation.Annotation`") + else tree + /** Check arguments of compiler-defined annotations */ def checkAnnotArgs(tree: Tree)(using Context): tree.type = val cls = Annotations.annotClass(tree) @@ -1327,7 +1356,7 @@ trait Checking { def ensureParentDerivesFrom(enumCase: Symbol)(using Context) = val enumCls = enumCase.owner.linkedClass if !firstParent.derivesFrom(enumCls) then - report.error(i"enum case does not extend its enum $enumCls", enumCase.srcPos) + report.error(em"enum case does not extend its enum $enumCls", enumCase.srcPos) cls.info match case info: ClassInfo => cls.info = info.derivedClassInfo(declaredParents = enumCls.typeRefApplied :: info.declaredParents) @@ -1365,9 +1394,9 @@ trait Checking { if (stat.symbol.isAllOf(EnumCase)) stat match { - case TypeDef(_, Template(DefDef(_, paramss, _, _), parents, _, _)) => + case TypeDef(_, impl @ Template(DefDef(_, paramss, _, _), _, _, _)) => paramss.foreach(_.foreach(check)) - parents.foreach(check) + impl.parents.foreach(check) case vdef: ValDef => vdef.rhs match { case Block((clsDef @ TypeDef(_, impl: Template)) :: Nil, _) @@ -1432,7 +1461,6 @@ trait Checking { def checkMatchable(tp: Type, pos: SrcPos, pattern: Boolean)(using Context): Unit = if !tp.derivesFrom(defn.MatchableClass) && sourceVersion.isAtLeast(`future-migration`) then - val kind = if pattern then "pattern selector" else "value" report.warning(MatchableWarning(tp, pattern), pos) /** Check that there is an implicit capability to throw a checked exception @@ -1515,7 +1543,7 @@ trait NoChecking 
extends ReChecking { override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () - override def checkImplicitConversionUseOK(tree: Tree)(using Context): Unit = () + override def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = () override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp override def checkAnnotArgs(tree: Tree)(using Context): tree.type = tree override def checkNoTargetNameConflict(stats: List[Tree])(using Context): Unit = () diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 044dd7bb8528..ef9599be551c 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -67,7 +67,7 @@ class CrossVersionChecks extends MiniPhase: if !skipWarning then val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("") val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("") - report.deprecationWarning(s"${sym.showLocated} is deprecated${since}${msg}", pos) + report.deprecationWarning(em"${sym.showLocated} is deprecated${since}${msg}", pos) private def checkExperimentalSignature(sym: Symbol, pos: SrcPos)(using Context): Unit = class Checker extends TypeTraverser: @@ -110,20 +110,12 @@ class CrossVersionChecks extends MiniPhase: !sym.isDeprecated && !sym.is(Deferred)) if (!concrOvers.isEmpty) report.deprecationWarning( - symbol.toString + " overrides concrete, non-deprecated symbol(s):" + - concrOvers.map(_.name).mkString(" ", ", ", ""), tree.srcPos) + em"""$symbol overrides concrete, non-deprecated definition(s): + | ${concrOvers.map(_.name).mkString(", 
")}""", + tree.srcPos) } } - /** Check that classes extending experimental classes or nested in experimental classes have the @experimental annotation. */ - private def checkExperimentalInheritance(cls: ClassSymbol)(using Context): Unit = - if !cls.isAnonymousClass && !cls.hasAnnotation(defn.ExperimentalAnnot) then - cls.info.parents.find(_.typeSymbol.isExperimental) match - case Some(parent) => - report.error(em"extension of experimental ${parent.typeSymbol} must have @experimental annotation", cls.srcPos) - case _ => - end checkExperimentalInheritance - override def transformValDef(tree: ValDef)(using Context): ValDef = checkDeprecatedOvers(tree) checkExperimentalAnnots(tree.symbol) @@ -136,12 +128,6 @@ class CrossVersionChecks extends MiniPhase: checkExperimentalSignature(tree.symbol, tree) tree - override def transformTemplate(tree: Template)(using Context): Tree = - val cls = ctx.owner.asClass - checkExperimentalInheritance(cls) - checkExperimentalAnnots(cls) - tree - override def transformIdent(tree: Ident)(using Context): Ident = { checkUndesiredProperties(tree.symbol, tree.srcPos) tree diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index d2165a5ca8c5..8fdc468780ba 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -44,7 +44,7 @@ trait Deriving { private def addDerivedInstance(clsName: Name, info: Type, pos: SrcPos): Unit = { val instanceName = "derived$".concat(clsName) if (ctx.denotNamed(instanceName).exists) - report.error(i"duplicate type class derivation for $clsName", pos) + report.error(em"duplicate type class derivation for $clsName", pos) else // If we set the Synthetic flag here widenGiven will widen too far and the // derived instance will have too low a priority to be selected over a freshly @@ -90,7 +90,7 @@ trait Deriving { xs.corresponds(ys)((x, y) => x.paramInfo.hasSameKindAs(y.paramInfo)) def 
cannotBeUnified = - report.error(i"${cls.name} cannot be unified with the type argument of ${typeClass.name}", derived.srcPos) + report.error(em"${cls.name} cannot be unified with the type argument of ${typeClass.name}", derived.srcPos) def addInstance(derivedParams: List[TypeSymbol], evidenceParamInfos: List[List[Type]], instanceTypes: List[Type]): Unit = { val resultType = typeClassType.appliedTo(instanceTypes) @@ -252,7 +252,7 @@ trait Deriving { if (typeClassArity == 1) deriveSingleParameter else if (typeClass == defn.CanEqualClass) deriveCanEqual else if (typeClassArity == 0) - report.error(i"type ${typeClass.name} in derives clause of ${cls.name} has no type parameters", derived.srcPos) + report.error(em"type ${typeClass.name} in derives clause of ${cls.name} has no type parameters", derived.srcPos) else cannotBeUnified } diff --git a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala index 5fefd355d7d8..d819528ff556 100644 --- a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala +++ b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala @@ -37,7 +37,7 @@ object Docstrings { case List(df: tpd.DefDef) => usecase.typed(df) case _ => - report.error("`@usecase` was not a valid definition", ctx.source.atSpan(usecase.codePos)) + report.error(em"`@usecase` was not a valid definition", ctx.source.atSpan(usecase.codePos)) usecase } } diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 1630ce31e4c6..717966923708 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -2,20 +2,22 @@ package dotty.tools package dotc package typer -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ +import 
dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Names.{Name, TermName} -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.TypeErasure -import util.Spans._ -import core.Symbols._ -import ErrorReporting._ -import reporting._ +import util.Spans.* +import core.Symbols.* +import ErrorReporting.* +import dotty.tools.dotc.transform.ValueClasses +import dotty.tools.dotc.transform.TypeUtils.isPrimitiveValueType +import reporting.* object Dynamic { private def isDynamicMethod(name: Name): Boolean = @@ -80,7 +82,7 @@ trait Dynamic { val args = tree.args val dynName = if (args.exists(isNamedArg)) nme.applyDynamicNamed else nme.applyDynamic if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args)) - errorTree(tree, "applyDynamicNamed does not support passing a vararg parameter") + errorTree(tree, em"applyDynamicNamed does not support passing a vararg parameter") else { def namedArgTuple(name: String, arg: untpd.Tree) = untpd.Tuple(List(Literal(Constant(name)), arg)) def namedArgs = args.map { @@ -179,12 +181,12 @@ trait Dynamic { val vargss = termArgss(tree) def structuralCall(selectorName: TermName, classOfs: => List[Tree]) = { - val selectable = adapt(qual, defn.SelectableClass.typeRef) + val selectable = adapt(qual, defn.SelectableClass.typeRef | defn.DynamicClass.typeRef) // ($qual: Selectable).$selectorName("$name") val base = untpd.Apply( - untpd.TypedSplice(selectable.select(selectorName)).withSpan(fun.span), + untpd.Select(untpd.TypedSplice(selectable), selectorName).withSpan(fun.span), (Literal(Constant(name.encode.toString)) :: Nil).map(untpd.TypedSplice(_))) val scall = @@ -214,9 +216,33 @@ trait Dynamic { def fail(reason: String): Tree = errorTree(tree, em"Structural access not allowed on method $name because it 
$reason") + extension (tree: Tree) + /** The implementations of `selectDynamic` and `applyDynamic` in `scala.reflect.SelectDynamic` have no information about the expected return type of a value/method which was declared in the refinement, + * only the JVM type after erasure can be obtained through reflection, e.g. + * + * class Foo(val i: Int) extends AnyVal + * class Reflective extends reflect.Selectable + * val reflective = new Reflective { + * def foo = Foo(1) // Foo at compile time, java.lang.Integer in reflection + * } + * + * Because of that reflective access cannot be implemented properly in `scala.reflect.SelectDynamic` itself + * because it's not known there if the value should be wrapped in a value class constructor call or not. + * Hence the logic of wrapping is performed here, relying on the fact that the implementations of `selectDynamic` and `applyDynamic` in `scala.reflect.SelectDynamic` are final. + */ + def maybeBoxingCast(tpe: Type) = + val maybeBoxed = + if ValueClasses.isDerivedValueClass(tpe.classSymbol) && qual.tpe <:< defn.ReflectSelectableTypeRef then + val genericUnderlying = ValueClasses.valueClassUnbox(tpe.classSymbol.asClass) + val underlying = tpe.select(genericUnderlying).widen.resultType + New(tpe, tree.cast(underlying) :: Nil) + else + tree + maybeBoxed.cast(tpe) + fun.tpe.widen match { case tpe: ValueType => - structuralCall(nme.selectDynamic, Nil).cast(tpe) + structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(tpe) case tpe: MethodType => def isDependentMethod(tpe: Type): Boolean = tpe match { @@ -236,7 +262,7 @@ trait Dynamic { fail(i"has a parameter type with an unstable erasure") :: Nil else TypeErasure.erasure(tpe).asInstanceOf[MethodType].paramInfos.map(clsOf(_)) - structuralCall(nme.applyDynamic, classOfs).cast(tpe.finalResultType) + structuralCall(nme.applyDynamic, classOfs).maybeBoxingCast(tpe.finalResultType) } // (@allanrenucci) I think everything below is dead code diff --git 
a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index cdd37a2f0be7..126d109889e1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -10,11 +10,9 @@ import Trees._ import NameOps._ import util.SrcPos import config.Feature -import java.util.regex.Matcher.quoteReplacement import reporting._ import collection.mutable -import scala.util.matching.Regex object ErrorReporting { @@ -43,12 +41,24 @@ object ErrorReporting { errorType(WrongNumberOfTypeArgs(fntpe, expectedArgs, actual), pos) def missingArgs(tree: Tree, mt: Type)(using Context): Unit = + def isCallableWithoutArgumentsLists(mt: Type): Boolean = mt match + case pt: PolyType => isCallableWithoutArgumentsLists(pt.resType) + case mt: MethodType if mt.isImplicitMethod => isCallableWithoutArgumentsLists(mt.resType) + case mt: MethodType => false + case _ => true + def isCallableWithSingleEmptyArgumentList(mt: Type): Boolean = + mt match + case mt: MethodType if mt.paramNames.isEmpty => isCallableWithoutArgumentsLists(mt.resType) + case mt: MethodType if mt.isImplicitMethod => isCallableWithSingleEmptyArgumentList(mt.resType) + case pt: PolyType => isCallableWithSingleEmptyArgumentList(pt.resType) + case _ => false val meth = err.exprStr(methPart(tree)) - mt match - case mt: MethodType if mt.paramNames.isEmpty => - report.error(MissingEmptyArgumentList(meth), tree.srcPos) - case _ => - report.error(em"missing arguments for $meth", tree.srcPos) + val info = if tree.symbol.exists then tree.symbol.info else mt + if isCallableWithSingleEmptyArgumentList(info) then + report.error(MissingEmptyArgumentList(meth), tree.srcPos) + else + report.error(MissingArgumentList(meth, tree.symbol), tree.srcPos) + def matchReductionAddendum(tps: Type*)(using Context): String = val collectMatchTrace = new TypeAccumulator[String]: @@ -69,17 +79,30 @@ object ErrorReporting { "\n(Note that 
variables need to be initialized to be defined)" else "" + /** Reveal arguments in FunProtos that are proteted by an IgnoredProto but were + * revealed during type inference. This gives clearer error messages for overloading + * resolution errors that need to show argument lists after the first. We do not + * reveal other kinds of ignored prototypes since these might be misleading because + * there might be a possible implicit conversion on the result. + */ + def revealDeepenedArgs(tp: Type): Type = tp match + case tp @ IgnoredProto(deepTp: FunProto) if tp.wasDeepened => deepTp + case _ => tp + def expectedTypeStr(tp: Type): String = tp match { case tp: PolyProto => - em"type arguments [${tp.targs.tpes}%, %] and ${expectedTypeStr(tp.resultType)}" + i"type arguments [${tp.targs.tpes}%, %] and ${expectedTypeStr(revealDeepenedArgs(tp.resultType))}" case tp: FunProto => - val result = tp.resultType match { - case _: WildcardType | _: IgnoredProto => "" - case tp => em" and expected result type $tp" - } - em"arguments (${tp.typedArgs().tpes}%, %)$result" + def argStr(tp: FunProto): String = + val result = revealDeepenedArgs(tp.resultType) match { + case restp: FunProto => argStr(restp) + case _: WildcardType | _: IgnoredProto => "" + case tp => i" and expected result type $tp" + } + i"(${tp.typedArgs().tpes}%, %)$result" + s"arguments ${argStr(tp)}" case _ => - em"expected type $tp" + i"expected type $tp" } def anonymousTypeMemberStr(tpe: Type): String = { @@ -88,12 +111,12 @@ object ErrorReporting { case _: MethodOrPoly => "method" case _ => "value of type" } - em"$kind $tpe" + i"$kind $tpe" } def overloadedAltsStr(alts: List[SingleDenotation]): String = - em"overloaded alternatives of ${denotStr(alts.head)} with types\n" + - em" ${alts map (_.info)}%\n %" + i"""overloaded alternatives of ${denotStr(alts.head)} with types + | ${alts map (_.info)}%\n %""" def denotStr(denot: Denotation): String = if (denot.isOverloaded) overloadedAltsStr(denot.alternatives) @@ -111,13 
+134,30 @@ object ErrorReporting { case _ => anonymousTypeMemberStr(tp) } + /** Explain info of symbol `sym` as a member of class `base`. + * @param showLocation if true also show sym's location. + */ + def infoString(sym: Symbol, base: Type, showLocation: Boolean): String = + val sym1 = sym.underlyingSymbol + def info = base.memberInfo(sym1) + val infoStr = + if sym1.isAliasType then i", which equals ${info.bounds.hi}" + else if sym1.isAbstractOrParamType && info != TypeBounds.empty then i" with bounds$info" + else if sym1.is(Module) then "" + else if sym1.isTerm then i" of type $info" + else "" + i"${if showLocation then sym1.showLocated else sym1}$infoStr" + + def infoStringWithLocation(sym: Symbol, base: Type) = + infoString(sym, base, showLocation = true) + def exprStr(tree: Tree): String = refStr(tree.tpe) - def takesNoParamsStr(tree: Tree, kind: String): String = + def takesNoParamsMsg(tree: Tree, kind: String): Message = if (tree.tpe.widen.exists) - i"${exprStr(tree)} does not take ${kind}parameters" + em"${exprStr(tree)} does not take ${kind}parameters" else { - i"undefined: $tree # ${tree.uniqueId}: ${tree.tpe.toString} at ${ctx.phase}" + em"undefined: $tree # ${tree.uniqueId}: ${tree.tpe.toString} at ${ctx.phase}" } def patternConstrStr(tree: Tree): String = ??? 
@@ -168,7 +208,9 @@ object ErrorReporting { |The tests were made under $constraintText""" def whyFailedStr(fail: FailedExtension) = - i""" failed with + i""" + | + | failed with: | |${fail.whyFailed.message.indented(8)}""" @@ -236,201 +278,9 @@ object ErrorReporting { ownerSym.typeRef.nonClassTypeMembers.map(_.symbol) }.toList - def dependentStr = + def dependentMsg = """Term-dependent types are experimental, - |they must be enabled with a `experimental.dependent` language import or setting""".stripMargin + |they must be enabled with a `experimental.dependent` language import or setting""".stripMargin.toMessage def err(using Context): Errors = new Errors } - -class ImplicitSearchError( - arg: tpd.Tree, - pt: Type, - where: String, - paramSymWithMethodCallTree: Option[(Symbol, tpd.Tree)] = None, - ignoredInstanceNormalImport: => Option[SearchSuccess], - importSuggestionAddendum: => String -)(using ctx: Context) { - - def missingArgMsg = arg.tpe match { - case ambi: AmbiguousImplicits => - (ambi.alt1, ambi.alt2) match { - case (alt @ AmbiguousImplicitMsg(msg), _) => - userDefinedAmbiguousImplicitMsg(alt, msg) - case (_, alt @ AmbiguousImplicitMsg(msg)) => - userDefinedAmbiguousImplicitMsg(alt, msg) - case _ => - defaultAmbiguousImplicitMsg(ambi) - } - case ambi @ TooUnspecific(target) => - ex"""No implicit search was attempted${location("for")} - |since the expected type $target is not specific enough""" - case _ => - val shortMessage = userDefinedImplicitNotFoundParamMessage - .orElse(userDefinedImplicitNotFoundTypeMessage) - .getOrElse(defaultImplicitNotFoundMessage) - formatMsg(shortMessage)() - ++ hiddenImplicitsAddendum - ++ ErrorReporting.matchReductionAddendum(pt) - } - - private def formatMsg(shortForm: String)(headline: String = shortForm) = arg match - case arg: Trees.SearchFailureIdent[?] => - arg.tpe match - case _: NoMatchingImplicits => headline - case tpe: SearchFailureType => - i"$headline. 
${tpe.explanation}" - case _ => headline - case _ => - arg.tpe match - case tpe: SearchFailureType => - val original = arg match - case Inlined(call, _, _) => call - case _ => arg - i"""$headline. - |I found: - | - | ${original.show.replace("\n", "\n ")} - | - |But ${tpe.explanation}.""" - case _ => headline - - /** Format `raw` implicitNotFound or implicitAmbiguous argument, replacing - * all occurrences of `${X}` where `X` is in `paramNames` with the - * corresponding shown type in `args`. - */ - private def userDefinedErrorString(raw: String, paramNames: List[String], args: List[Type]): String = { - def translate(name: String): Option[String] = { - val idx = paramNames.indexOf(name) - if (idx >= 0) Some(ex"${args(idx)}") else None - } - - """\$\{\s*([^}\s]+)\s*\}""".r.replaceAllIn(raw, (_: Regex.Match) match { - case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("")).nn - }) - } - - /** Extract a user defined error message from a symbol `sym` - * with an annotation matching the given class symbol `cls`. - */ - private def userDefinedMsg(sym: Symbol, cls: Symbol) = for { - ann <- sym.getAnnotation(cls) - msg <- ann.argumentConstantString(0) - } yield msg - - private def location(preposition: String) = if (where.isEmpty) "" else s" $preposition $where" - - private def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = - s"Ambiguous given instances: ${ambi.explanation}${location("of")}" - - private def defaultImplicitNotFoundMessage = - ex"No given instance of type $pt was found${location("for")}" - - /** Construct a custom error message given an ambiguous implicit - * candidate `alt` and a user defined message `raw`. 
- */ - private def userDefinedAmbiguousImplicitMsg(alt: SearchSuccess, raw: String) = { - val params = alt.ref.underlying match { - case p: PolyType => p.paramNames.map(_.toString) - case _ => Nil - } - def resolveTypes(targs: List[tpd.Tree])(using Context) = - targs.map(a => Inferencing.fullyDefinedType(a.tpe, "type argument", a.srcPos)) - - // We can extract type arguments from: - // - a function call: - // @implicitAmbiguous("msg A=${A}") - // implicit def f[A](): String = ... - // implicitly[String] // found: f[Any]() - // - // - an eta-expanded function: - // @implicitAmbiguous("msg A=${A}") - // implicit def f[A](x: Int): String = ... - // implicitly[Int => String] // found: x => f[Any](x) - - val call = tpd.closureBody(alt.tree) // the tree itself if not a closure - val targs = tpd.typeArgss(call).flatten - val args = resolveTypes(targs)(using ctx.fresh.setTyperState(alt.tstate)) - userDefinedErrorString(raw, params, args) - } - - /** @param rawMsg Message template with variables, e.g. "Variable A is ${A}" - * @param sym Symbol of the annotated type or of the method whose parameter was annotated - * @param substituteType Function substituting specific types for abstract types associated with variables, e.g A -> Int - */ - private def formatAnnotationMessage(rawMsg: String, sym: Symbol, substituteType: Type => Type): String = { - val substitutableTypesSymbols = ErrorReporting.substitutableTypeSymbolsInScope(sym) - - userDefinedErrorString( - rawMsg, - paramNames = substitutableTypesSymbols.map(_.name.unexpandedName.toString), - args = substitutableTypesSymbols.map(_.typeRef).map(substituteType) - ) - } - - /** Extracting the message from a method parameter, e.g. in - * - * trait Foo - * - * def foo(implicit @annotation.implicitNotFound("Foo is missing") foo: Foo): Any = ??? 
- */ - private def userDefinedImplicitNotFoundParamMessage: Option[String] = paramSymWithMethodCallTree.flatMap { (sym, applTree) => - userDefinedMsg(sym, defn.ImplicitNotFoundAnnot).map { rawMsg => - val fn = tpd.funPart(applTree) - val targs = tpd.typeArgss(applTree).flatten - val methodOwner = fn.symbol.owner - val methodOwnerType = tpd.qualifier(fn).tpe - val methodTypeParams = fn.symbol.paramSymss.flatten.filter(_.isType) - val methodTypeArgs = targs.map(_.tpe) - val substituteType = (_: Type).asSeenFrom(methodOwnerType, methodOwner).subst(methodTypeParams, methodTypeArgs) - formatAnnotationMessage(rawMsg, sym.owner, substituteType) - } - } - - /** Extracting the message from a type, e.g. in - * - * @annotation.implicitNotFound("Foo is missing") - * trait Foo - * - * def foo(implicit foo: Foo): Any = ??? - */ - private def userDefinedImplicitNotFoundTypeMessage: Option[String] = - def recur(tp: Type): Option[String] = tp match - case tp: TypeRef => - val sym = tp.symbol - userDefinedImplicitNotFoundTypeMessage(sym).orElse(recur(tp.info)) - case tp: ClassInfo => - tp.baseClasses.iterator - .map(userDefinedImplicitNotFoundTypeMessage) - .find(_.isDefined).flatten - case tp: TypeProxy => - recur(tp.superType) - case tp: AndType => - recur(tp.tp1).orElse(recur(tp.tp2)) - case _ => - None - recur(pt) - - private def userDefinedImplicitNotFoundTypeMessage(sym: Symbol): Option[String] = - for - rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) - if Feature.migrateTo3 || sym != defn.Function1 - // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore - yield - val substituteType = (_: Type).asSeenFrom(pt, sym) - formatAnnotationMessage(rawMsg, sym, substituteType) - - private def hiddenImplicitsAddendum: String = - def hiddenImplicitNote(s: SearchSuccess) = - em"\n\nNote: ${s.ref.symbol.showLocated} was not considered because it was not imported with `import given`." 
- - val normalImports = ignoredInstanceNormalImport.map(hiddenImplicitNote) - - normalImports.getOrElse(importSuggestionAddendum) - end hiddenImplicitsAddendum - - private object AmbiguousImplicitMsg { - def unapply(search: SearchSuccess): Option[String] = - userDefinedMsg(search.ref.symbol, defn.ImplicitAmbiguousAnnot) - } -} diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index 46725f0fa6b2..b1513df777ec 100644 --- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -285,8 +285,9 @@ object EtaExpansion extends LiftImpure { val body = Apply(lifted, ids) if (mt.isContextualMethod) body.setApplyKind(ApplyKind.Using) val fn = - if (mt.isContextualMethod) new untpd.FunctionWithMods(params, body, Modifiers(Given)) - else if (mt.isImplicitMethod) new untpd.FunctionWithMods(params, body, Modifiers(Implicit)) + if (mt.isContextualMethod) new untpd.FunctionWithMods(params, body, Modifiers(Given), mt.erasedParams) + else if (mt.isImplicitMethod) new untpd.FunctionWithMods(params, body, Modifiers(Implicit), mt.erasedParams) + else if (mt.hasErasedParams) new untpd.FunctionWithMods(params, body, Modifiers(), mt.erasedParams) else untpd.Function(params, body) if (defs.nonEmpty) untpd.Block(defs.toList map (untpd.TypedSplice(_)), fn) else fn } diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index ede44c2b7f86..a9631ad45e28 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -31,6 +31,7 @@ import Feature.migrateTo3 import config.Printers.{implicits, implicitsDetailed} import collection.mutable import reporting._ +import transform.Splicer import annotation.tailrec import scala.annotation.internal.sharable @@ -435,20 +436,15 @@ object Implicits: final protected def qualify(using Context): String 
= expectedType match { case SelectionProto(name, mproto, _, _) if !argument.isEmpty => - em"provide an extension method `$name` on ${argument.tpe}" + i"provide an extension method `$name` on ${argument.tpe}" case NoType => - if (argument.isEmpty) em"match expected type" - else em"convert from ${argument.tpe} to expected type" + if (argument.isEmpty) i"match expected type" + else i"convert from ${argument.tpe} to expected type" case _ => - if (argument.isEmpty) em"match type ${clarify(expectedType)}" - else em"convert from ${argument.tpe} to ${clarify(expectedType)}" + if (argument.isEmpty) i"match type ${clarify(expectedType)}" + else i"convert from ${argument.tpe} to ${clarify(expectedType)}" } - /** An explanation of the cause of the failure as a string */ - def explanation(using Context): String - - def msg(using Context): Message = explanation - /** If search was for an implicit conversion, a note describing the failure * in more detail - this is either empty or starts with a '\n' */ @@ -488,8 +484,9 @@ object Implicits: map(tp) } - def explanation(using Context): String = + def msg(using Context): Message = em"no implicit values were found that $qualify" + override def toString = s"NoMatchingImplicits($expectedType, $argument)" } @@ -509,20 +506,20 @@ object Implicits: i""" |Note that implicit conversions were not tried because the result of an implicit conversion |must be more specific than $target""" - override def explanation(using Context) = - i"""${super.explanation}. 
- |The expected type $target is not specific enough, so no search was attempted""" + + override def msg(using Context) = + super.msg.append("\nThe expected type $target is not specific enough, so no search was attempted") override def toString = s"TooUnspecific" /** An ambiguous implicits failure */ class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree) extends SearchFailureType { - def explanation(using Context): String = + def msg(using Context): Message = var str1 = err.refStr(alt1.ref) var str2 = err.refStr(alt2.ref) if str1 == str2 then str1 = ctx.printer.toTextRef(alt1.ref).show str2 = ctx.printer.toTextRef(alt2.ref).show - em"both $str1 and $str2 $qualify" + em"both $str1 and $str2 $qualify".withoutDisambiguation() override def whyNoConversion(using Context): String = if !argument.isEmpty && argument.tpe.widen.isRef(defn.NothingClass) then "" @@ -536,21 +533,21 @@ object Implicits: class MismatchedImplicit(ref: TermRef, val expectedType: Type, val argument: Tree) extends SearchFailureType { - def explanation(using Context): String = + def msg(using Context): Message = em"${err.refStr(ref)} does not $qualify" } class DivergingImplicit(ref: TermRef, val expectedType: Type, val argument: Tree) extends SearchFailureType { - def explanation(using Context): String = + def msg(using Context): Message = em"${err.refStr(ref)} produces a diverging implicit search when trying to $qualify" } /** A search failure type for attempted ill-typed extension method calls */ class FailedExtension(extApp: Tree, val expectedType: Type, val whyFailed: Message) extends SearchFailureType: def argument = EmptyTree - def explanation(using Context) = em"$extApp does not $qualify" + def msg(using Context) = em"$extApp does not $qualify" /** A search failure type for aborted searches of extension methods, typically * because of a cyclic reference or similar. 
@@ -558,7 +555,6 @@ object Implicits: class NestedFailure(_msg: Message, val expectedType: Type) extends SearchFailureType: def argument = EmptyTree override def msg(using Context) = _msg - def explanation(using Context) = msg.toString /** A search failure type for failed synthesis of terms for special types */ class SynthesisFailure(reasons: List[String], val expectedType: Type) extends SearchFailureType: @@ -568,10 +564,16 @@ object Implicits: if reasons.length > 1 then reasons.mkString("\n\t* ", "\n\t* ", "") else - reasons.mkString + reasons.mkString(" ", "", "") - def explanation(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}: ${formatReasons}" + def msg(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}:${formatReasons}" + class MacroErrorsFailure(errors: List[Diagnostic.Error], + val expectedType: Type, + val argument: Tree) extends SearchFailureType { + def msg(using Context): Message = + em"${errors.map(_.msg).mkString("\n")}" + } end Implicits import Implicits._ @@ -620,6 +622,8 @@ trait ImplicitRunInfo: traverse(t.prefix) case t: ThisType if t.cls.is(Module) && t.cls.isStaticOwner => traverse(t.cls.sourceModule.termRef) + case t: ThisType => + traverse(t.tref) case t: ConstantType => traverse(t.underlying) case t: TypeParamRef => @@ -627,12 +631,15 @@ trait ImplicitRunInfo: traverse(t.underlying) case t: TermParamRef => traverse(t.underlying) + case t: TypeLambda => + for p <- t.paramRefs do partSeen += p + traverseChildren(t) case t => traverseChildren(t) def apply(tp: Type): collection.Set[Type] = parts = mutable.LinkedHashSet() - partSeen.clear() + partSeen.clear(resetToInitial = false) traverse(tp) parts end collectParts @@ -738,6 +745,7 @@ trait ImplicitRunInfo: * - If `T` is a singleton reference, the anchors of its underlying type, plus, * if `T` is of the form `(P#x).type`, the anchors of `P`. 
* - If `T` is the this-type of a static object, the anchors of a term reference to that object. + * - If `T` is some other this-type `P.this.type`, the anchors of `P`. * - If `T` is some other type, the union of the anchors of each constituent type of `T`. * * The _implicit scope_ of a type `tp` is the smallest set S of term references (i.e. TermRefs) @@ -848,7 +856,7 @@ trait Implicits: inferred match { case SearchSuccess(_, ref, _, false) if isOldStyleFunctionConversion(ref.underlying) => report.migrationWarning( - i"The conversion ${ref} will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views.", + em"The conversion ${ref} will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views.", from ) case _ => @@ -902,7 +910,7 @@ trait Implicits: pt: Type, where: String, paramSymWithMethodCallTree: Option[(Symbol, Tree)] = None - )(using Context): String = { + )(using Context): Message = { def findHiddenImplicitsCtx(c: Context): Context = if c == NoContext then c else c.freshOver(findHiddenImplicitsCtx(c.outer)).addMode(Mode.FindHiddenImplicits) @@ -925,8 +933,34 @@ trait Implicits: // example where searching for a nested type causes an infinite loop. 
None - val error = new ImplicitSearchError(arg, pt, where, paramSymWithMethodCallTree, ignoredInstanceNormalImport, importSuggestionAddendum(pt)) - error.missingArgMsg + def allImplicits(currImplicits: ContextualImplicits): List[ImplicitRef] = + if currImplicits.outerImplicits == null then currImplicits.refs + else currImplicits.refs ::: allImplicits(currImplicits.outerImplicits) + + /** Whether the given type is for an implicit def that's a Scala 2 implicit conversion */ + def isImplicitDefConversion(typ: Type): Boolean = typ match { + case PolyType(_, resType) => isImplicitDefConversion(resType) + case mt: MethodType => !mt.isImplicitMethod && !mt.isContextualMethod + case _ => false + } + + def ignoredConvertibleImplicits = arg.tpe match + case fail: SearchFailureType => + if (fail.expectedType eq pt) || isFullyDefined(fail.expectedType, ForceDegree.none) then + // Get every implicit in scope and try to convert each + allImplicits(ctx.implicits) + .view + .map(_.underlyingRef) + .distinctBy(_.denot) + .filter { imp => + !isImplicitDefConversion(imp.underlying) + && imp.symbol != defn.Predef_conforms + && viewExists(imp, fail.expectedType) + } + else + Nil + + MissingImplicitArgument(arg, pt, where, paramSymWithMethodCallTree, ignoredInstanceNormalImport, ignoredConvertibleImplicits) } /** A string indicating the formal parameter corresponding to a missing argument */ @@ -935,11 +969,11 @@ trait Implicits: case Select(qual, nme.apply) if defn.isFunctionType(qual.tpe.widen) => val qt = qual.tpe.widen val qt1 = qt.dealiasKeepAnnots - def addendum = if (qt1 eq qt) "" else (i"\nThe required type is an alias of: $qt1") - em"parameter of ${qual.tpe.widen}$addendum" + def addendum = if (qt1 eq qt) "" else (i"\nWhere $qt is an alias of: $qt1") + i"parameter of ${qual.tpe.widen}$addendum" case _ => - em"${ if paramName.is(EvidenceParamName) then "an implicit parameter" - else s"parameter $paramName" } of $methodStr" + i"${ if paramName.is(EvidenceParamName) then "an 
implicit parameter" + else s"parameter $paramName" } of $methodStr" } /** A CanEqual[T, U] instance is assumed @@ -1005,11 +1039,10 @@ trait Implicits: if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase.phaseName}" else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}") - if pt.unusableForInference - || !argument.isEmpty && argument.tpe.unusableForInference - then return NoMatchingImplicitsFailure + val usableForInference = !pt.unusableForInference + && (argument.isEmpty || !argument.tpe.unusableForInference) - val result0 = + val result0 = if usableForInference then // If we are searching implicits when resolving an import symbol, start the search // in the first enclosing context that does not have the same scope and owner as the current // context. Without that precaution, an eligible implicit in the current scope @@ -1026,7 +1059,7 @@ trait Implicits: catch case ce: CyclicReference => ce.inImplicitSearch = true throw ce - end result0 + else NoMatchingImplicitsFailure val result = result0 match { @@ -1034,7 +1067,7 @@ trait Implicits: if result.tstate ne ctx.typerState then result.tstate.commit() if result.gstate ne ctx.gadt then - ctx.gadt.restore(result.gstate) + ctx.gadtState.restore(result.gstate) if hasSkolem(false, result.tree) then report.error(SkolemInInferred(result.tree, pt, argument), ctx.source.atSpan(span)) implicits.println(i"success: $result") @@ -1047,14 +1080,15 @@ trait Implicits: withMode(Mode.OldOverloadingResolution)(inferImplicit(pt, argument, span)) match { case altResult: SearchSuccess => report.migrationWarning( - s"According to new implicit resolution rules, this will be ambiguous:\n${result.reason.explanation}", + result.reason.msg + .prepend(s"According to new implicit resolution rules, this will be ambiguous:\n"), ctx.source.atSpan(span)) altResult case _ => result } else result - case NoMatchingImplicitsFailure => + case 
NoMatchingImplicitsFailure if usableForInference => SearchFailure(new NoMatchingImplicits(pt, argument, ctx.typerState.constraint), span) case _ => result0 @@ -1133,19 +1167,22 @@ trait Implicits: if ctx.reporter.hasErrors || !cand.ref.symbol.isAccessibleFrom(cand.ref.prefix) then - ctx.reporter.removeBufferedMessages - adapted.tpe match { + val res = adapted.tpe match { case _: SearchFailureType => SearchFailure(adapted) case error: PreviousErrorType if !adapted.symbol.isAccessibleFrom(cand.ref.prefix) => SearchFailure(adapted.withType(new NestedFailure(error.msg, pt))) - case _ => + case tpe => // Special case for `$conforms` and `<:<.refl`. Showing them to the users brings // no value, so we instead report a `NoMatchingImplicitsFailure` if (adapted.symbol == defn.Predef_conforms || adapted.symbol == defn.SubType_refl) NoMatchingImplicitsFailure + else if Splicer.inMacroExpansion && tpe <:< pt then + SearchFailure(adapted.withType(new MacroErrorsFailure(ctx.reporter.allErrors.reverse, pt, argument))) else SearchFailure(adapted.withType(new MismatchedImplicit(ref, pt, argument))) } + ctx.reporter.removeBufferedMessages + res else SearchSuccess(adapted, ref, cand.level, cand.isExtension)(ctx.typerState, ctx.gadt) } @@ -1354,13 +1391,13 @@ trait Implicits: def warnAmbiguousNegation(ambi: AmbiguousImplicits) = report.migrationWarning( - i"""Ambiguous implicits ${ambi.alt1.ref.symbol.showLocated} and ${ambi.alt2.ref.symbol.showLocated} - |seem to be used to implement a local failure in order to negate an implicit search. - |According to the new implicit resolution rules this is no longer possible; - |the search will fail with a global ambiguity error instead. - | - |Consider using the scala.util.NotGiven class to implement similar functionality.""", - srcPos) + em"""Ambiguous implicits ${ambi.alt1.ref.symbol.showLocated} and ${ambi.alt2.ref.symbol.showLocated} + |seem to be used to implement a local failure in order to negate an implicit search. 
+ |According to the new implicit resolution rules this is no longer possible; + |the search will fail with a global ambiguity error instead. + | + |Consider using the scala.util.NotGiven class to implement similar functionality.""", + srcPos) /** Compare the length of the baseClasses of two symbols (except for objects, * where we use the length of the companion class instead if it's bigger). diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 27b83e025cf9..0e1c41ceef74 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -6,15 +6,14 @@ import core._ import ast._ import Contexts._, Types._, Flags._, Symbols._ import ProtoTypes._ -import NameKinds.{AvoidNameKind, UniqueName} +import NameKinds.UniqueName import util.Spans._ -import util.{Stats, SimpleIdentityMap, SrcPos} +import util.{Stats, SimpleIdentityMap, SimpleIdentitySet, SrcPos} import Decorators._ import config.Printers.{gadts, typr} import annotation.tailrec import reporting._ import collection.mutable - import scala.annotation.internal.sharable object Inferencing { @@ -27,12 +26,8 @@ object Inferencing { * but only if the overall result of `isFullyDefined` is `true`. * Variables that are successfully minimized do not count as uninstantiated. */ - def isFullyDefined(tp: Type, force: ForceDegree.Value)(using Context): Boolean = { - val nestedCtx = ctx.fresh.setNewTyperState() - val result = new IsFullyDefinedAccumulator(force)(using nestedCtx).process(tp) - if (result) nestedCtx.typerState.commit() - result - } + def isFullyDefined(tp: Type, force: ForceDegree.Value)(using Context): Boolean = + withFreshTyperState(new IsFullyDefinedAccumulator(force).process(tp), x => x) /** Try to fully define `tp`. Return whether constraint has changed. * Any changed constraint is kept. 
@@ -171,14 +166,18 @@ object Inferencing { private var toMaximize: List[TypeVar] = Nil - def apply(x: Boolean, tp: Type): Boolean = - try tp.dealias match + def apply(x: Boolean, tp: Type): Boolean = trace(i"isFullyDefined($tp, $force)", typr) { + try { + val tpd = tp.dealias + if tpd ne tp then apply(x, tpd) + else tp match case _: WildcardType | _: ProtoType => false case tvar: TypeVar if !tvar.isInstantiated => force.appliesTo(tvar) && ctx.typerState.constraint.contains(tvar) && { + var fail = false val direction = instDirection(tvar.origin) if minimizeSelected then if direction <= 0 && tvar.hasLowerBound then @@ -188,20 +187,19 @@ object Inferencing { // else hold off instantiating unbounded unconstrained variable else if direction != 0 then instantiate(tvar, fromBelow = direction < 0) - else if variance >= 0 && (force.ifBottom == IfBottom.ok || tvar.hasLowerBound) then + else if variance >= 0 && (force.ifBottom == IfBottom.ok && !tvar.hasUpperBound || tvar.hasLowerBound) then instantiate(tvar, fromBelow = true) else if variance >= 0 && force.ifBottom == IfBottom.fail then - return false + fail = true else toMaximize = tvar :: toMaximize - foldOver(x, tvar) - } - case tp => - reporting.trace(s"IFT $tp") { - foldOver(x, tp) + !fail && foldOver(x, tvar) } + case tp => foldOver(x, tp) + } catch case ex: Throwable => handleRecursive("check fully defined", tp.show, ex) + } def process(tp: Type): Boolean = // Maximize type vars in the order they were visited before */ @@ -267,7 +265,7 @@ object Inferencing { && ctx.gadt.contains(tp.symbol) => val sym = tp.symbol - val res = ctx.gadt.approximation(sym, fromBelow = variance < 0) + val res = ctx.gadtState.approximation(sym, fromBelow = variance < 0) gadts.println(i"approximated $tp ~~ $res") res @@ -312,16 +310,17 @@ object Inferencing { } /** If `tree` has a type lambda type, infer its type parameters by comparing with expected type `pt` */ - def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe 
match { + def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match case tl: TypeLambda => val (tl1, tvars) = constrained(tl, tree) var tree1 = AppliedTypeTree(tree.withType(tl1), tvars) tree1.tpe <:< pt - fullyDefinedType(tree1.tpe, "template parent", tree.srcPos) - tree1 + if isFullyDefined(tree1.tpe, force = ForceDegree.failBottom) then + tree1 + else + EmptyTree case _ => tree - } def isSkolemFree(tp: Type)(using Context): Boolean = !tp.existsPart(_.isInstanceOf[SkolemType]) @@ -418,7 +417,7 @@ object Inferencing { if safeToInstantiate then tvar.instantiate(fromBelow = v == -1) else { val bounds = TypeComparer.fullBounds(tvar.origin) - if bounds.hi <:< bounds.lo || bounds.hi.classSymbol.is(Final) then + if (bounds.hi frozen_<:< bounds.lo) || bounds.hi.classSymbol.is(Final) then tvar.instantiate(fromBelow = false) else { // We do not add the created symbols to GADT constraint immediately, since they may have inter-dependencies. @@ -437,7 +436,7 @@ object Inferencing { } // We add the created symbols to GADT constraint here. - if (res.nonEmpty) ctx.gadt.addToConstraint(res) + if (res.nonEmpty) ctx.gadtState.addToConstraint(res) res } @@ -547,6 +546,10 @@ object Inferencing { case tp: AnnotatedType => tp.derivedAnnotatedType(captureWildcards(tp.parent), tp.annot) case _ => tp } + + def hasCaptureConversionArg(tp: Type)(using Context): Boolean = tp match + case tp: AppliedType => tp.args.exists(_.typeSymbol == defn.TypeBox_CAP) + case _ => false } trait Inferencing { this: Typer => @@ -574,7 +577,7 @@ trait Inferencing { this: Typer => * Then `Y` also occurs co-variantly in `T` because it needs to be minimized in order to constrain * `T` the least. See `variances` for more detail. 
*/ - def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = { + def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = val state = ctx.typerState // Note that some variables in `locked` might not be in `state.ownedVars` @@ -583,7 +586,7 @@ trait Inferencing { this: Typer => // `qualifying`. val ownedVars = state.ownedVars - if ((ownedVars ne locked) && !ownedVars.isEmpty) { + if (ownedVars ne locked) && !ownedVars.isEmpty then val qualifying = ownedVars -- locked if (!qualifying.isEmpty) { typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") @@ -619,44 +622,67 @@ trait Inferencing { this: Typer => if state.reporter.hasUnreportedErrors then return tree def constraint = state.constraint - type InstantiateQueue = mutable.ListBuffer[(TypeVar, Boolean)] - val toInstantiate = new InstantiateQueue - for tvar <- qualifying do - if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then - constrainIfDependentParamRef(tvar, tree) - // Needs to be checked again, since previous interpolations could already have - // instantiated `tvar` through unification. - val v = vs(tvar) - if v == null then - // Even though `tvar` is non-occurring in `v`, the specific - // instantiation we pick still matters because `tvar` might appear - // in the bounds of a non-`qualifying` type variable in the - // constraint. - // In particular, if `tvar` was created as the upper or lower - // bound of an existing variable by `LevelAvoidMap`, we - // instantiate it in the direction corresponding to the - // original variable which might be further constrained later. - // Otherwise, we simply rely on `hasLowerBound`. 
- val name = tvar.origin.paramName - val fromBelow = - name.is(AvoidNameKind.UpperBound) || - !name.is(AvoidNameKind.LowerBound) && tvar.hasLowerBound - typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") - toInstantiate += ((tvar, fromBelow)) - else if v.intValue != 0 then - typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint") - toInstantiate += ((tvar, v.intValue == 1)) - else comparing(cmp => - if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then - // Invariant: The type of a tree whose enclosing scope is level - // N only contains type variables of level <= N. - typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") - cmp.atLevel(ctx.nestingLevel, tvar.origin) - else - typr.println(i"no interpolation for nonvariant $tvar in $state") - ) - /** Instantiate all type variables in `buf` in the indicated directions. + /** Values of this type report type variables to instantiate with variance indication: + * +1 variable appears covariantly, can be instantiated from lower bound + * -1 variable appears contravariantly, can be instantiated from upper bound + * 0 variable does not appear at all, can be instantiated from either bound + */ + type ToInstantiate = List[(TypeVar, Int)] + + val toInstantiate: ToInstantiate = + val buf = new mutable.ListBuffer[(TypeVar, Int)] + for tvar <- qualifying do + if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then + constrainIfDependentParamRef(tvar, tree) + if !tvar.isInstantiated then + // isInstantiated needs to be checked again, since previous interpolations could already have + // instantiated `tvar` through unification. 
+ val v = vs(tvar) + if v == null then buf += ((tvar, 0)) + else if v.intValue != 0 then buf += ((tvar, v.intValue)) + else comparing(cmp => + if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then + // Invariant: The type of a tree whose enclosing scope is level + // N only contains type variables of level <= N. + typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") + cmp.atLevel(ctx.nestingLevel, tvar.origin) + else + typr.println(i"no interpolation for nonvariant $tvar in $state") + ) + buf.toList + + def typeVarsIn(xs: ToInstantiate): TypeVars = + xs.foldLeft(SimpleIdentitySet.empty: TypeVars)((tvs, tvi) => tvs + tvi._1) + + /** Filter list of proposed instantiations so that they don't constrain further + * the current constraint. + */ + def filterByDeps(tvs0: ToInstantiate): ToInstantiate = + val excluded = // ignore dependencies from other variables that are being instantiated + typeVarsIn(tvs0) + def step(tvs: ToInstantiate): ToInstantiate = tvs match + case tvs @ (hd @ (tvar, v)) :: tvs1 => + def aboveOK = !constraint.dependsOn(tvar, excluded, co = true) + def belowOK = !constraint.dependsOn(tvar, excluded, co = false) + if v == 0 && !aboveOK then + step((tvar, 1) :: tvs1) + else if v == 0 && !belowOK then + step((tvar, -1) :: tvs1) + else if v == -1 && !aboveOK || v == 1 && !belowOK then + typr.println(i"drop $tvar, $v in $tp, $pt, qualifying = ${qualifying.toList}, tvs0 = ${tvs0.toList}%, %, excluded = ${excluded.toList}, $constraint") + step(tvs1) + else // no conflict, keep the instantiation proposal + tvs.derivedCons(hd, step(tvs1)) + case Nil => + Nil + val tvs1 = step(tvs0) + if tvs1 eq tvs0 then tvs1 + else filterByDeps(tvs1) // filter again with smaller excluded set + end filterByDeps + + /** Instantiate all type variables in `tvs` in the indicated directions, + * as described in the doc comment of `ToInstantiate`. 
* If a type variable A is instantiated from below, and there is another * type variable B in `buf` that is known to be smaller than A, wait and * instantiate all other type variables before trying to instantiate A again. @@ -685,29 +711,37 @@ trait Inferencing { this: Typer => * * V2 := V3, O2 := O3 */ - def doInstantiate(buf: InstantiateQueue): Unit = - if buf.nonEmpty then - val suspended = new InstantiateQueue - while buf.nonEmpty do - val first @ (tvar, fromBelow) = buf.head - buf.dropInPlace(1) - if !tvar.isInstantiated then - val suspend = buf.exists{ (following, _) => - if fromBelow then - constraint.isLess(following.origin, tvar.origin) - else - constraint.isLess(tvar.origin, following.origin) + def doInstantiate(tvs: ToInstantiate): Unit = + + /** Try to instantiate `tvs`, return any suspended type variables */ + def tryInstantiate(tvs: ToInstantiate): ToInstantiate = tvs match + case (hd @ (tvar, v)) :: tvs1 => + val fromBelow = v == 1 || (v == 0 && tvar.hasLowerBound) + typr.println( + i"interpolate${if v == 0 then " non-occurring" else ""} $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") + if tvar.isInstantiated then + tryInstantiate(tvs1) + else + val suspend = tvs1.exists{ (following, _) => + if fromBelow + then constraint.isLess(following.origin, tvar.origin) + else constraint.isLess(tvar.origin, following.origin) } - if suspend then suspended += first else tvar.instantiate(fromBelow) - end if - end while - doInstantiate(suspended) + if suspend then + typr.println(i"suspended: $hd") + hd :: tryInstantiate(tvs1) + else + tvar.instantiate(fromBelow) + tryInstantiate(tvs1) + case Nil => Nil + if tvs.nonEmpty then doInstantiate(tryInstantiate(tvs)) end doInstantiate - doInstantiate(toInstantiate) + + doInstantiate(filterByDeps(toInstantiate)) } - } + end if tree - } + end interpolateTypeVars /** If `tvar` represents a parameter of a dependent method type in the current `call` * approximate it from below with the type of the actual 
argument. Skolemize that @@ -737,13 +771,14 @@ trait Inferencing { this: Typer => end constrainIfDependentParamRef } -/** An enumeration controlling the degree of forcing in "is-dully-defined" checks. */ +/** An enumeration controlling the degree of forcing in "is-fully-defined" checks. */ @sharable object ForceDegree { - class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom) - val none: Value = new Value(_ => false, IfBottom.ok) - val all: Value = new Value(_ => true, IfBottom.ok) - val failBottom: Value = new Value(_ => true, IfBottom.fail) - val flipBottom: Value = new Value(_ => true, IfBottom.flip) + class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom): + override def toString = s"ForceDegree.Value(.., $ifBottom)" + val none: Value = new Value(_ => false, IfBottom.ok) { override def toString = "ForceDegree.none" } + val all: Value = new Value(_ => true, IfBottom.ok) { override def toString = "ForceDegree.all" } + val failBottom: Value = new Value(_ => true, IfBottom.fail) { override def toString = "ForceDegree.failBottom" } + val flipBottom: Value = new Value(_ => true, IfBottom.flip) { override def toString = "ForceDegree.flipBottom" } } enum IfBottom: diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index ad8d0e50d348..4eeb5540f137 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -201,7 +201,7 @@ class Namer { typer: Typer => case tree: MemberDef => SymDenotations.canBeLocal(tree.name, flags) case _ => false if !ok then - report.error(i"modifier(s) `${flags.flagsString}` incompatible with $kind definition", tree.srcPos) + report.error(em"modifier(s) `${flags.flagsString}` incompatible with $kind definition", tree.srcPos) if adapted.is(Private) && canBeLocal then adapted | Local else adapted } @@ -461,8 +461,8 @@ class Namer { typer: Typer => val isProvisional = 
parents.exists(!_.baseType(defn.AnyClass).exists) if isProvisional then typr.println(i"provisional superclass $first for $cls") - first = AnnotatedType(first, Annotation(defn.ProvisionalSuperClassAnnot)) - checkFeasibleParent(first, cls.srcPos, em" in inferred superclass $first") :: parents + first = AnnotatedType(first, Annotation(defn.ProvisionalSuperClassAnnot, cls.span)) + checkFeasibleParent(first, cls.srcPos, i" in inferred superclass $first") :: parents end ensureFirstIsClass /** Add child annotation for `child` to annotations of `cls`. The annotation @@ -541,7 +541,11 @@ class Namer { typer: Typer => res = cpy.TypeDef(modCls)( rhs = cpy.Template(modTempl)( derived = if (fromTempl.derived.nonEmpty) fromTempl.derived else modTempl.derived, - body = fromTempl.body ++ modTempl.body)) + body = fromTempl.body.filter { + case stat: DefDef => stat.name != nme.toString_ + // toString should only be generated if explicit companion is missing + case _ => true + } ++ modTempl.body)) if (fromTempl.derived.nonEmpty) { if (modTempl.derived.nonEmpty) report.error(em"a class and its companion cannot both have `derives` clauses", mdef.srcPos) @@ -762,7 +766,7 @@ class Namer { typer: Typer => } def missingType(sym: Symbol, modifier: String)(using Context): Unit = { - report.error(s"${modifier}type of implicit definition needs to be given explicitly", sym.srcPos) + report.error(em"${modifier}type of implicit definition needs to be given explicitly", sym.srcPos) sym.resetFlag(GivenOrImplicit) } @@ -831,9 +835,9 @@ class Namer { typer: Typer => for (annotTree <- original.mods.annotations) { val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) if (cls eq sym) - report.error("An annotation class cannot be annotated with iself", annotTree.srcPos) + report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) else { - val ann = Annotation.deferred(cls)(typedAheadAnnotation(annotTree)(using annotCtx)) + val ann = 
Annotation.deferred(cls)(typedAheadExpr(annotTree)(using annotCtx)) sym.addAnnotation(ann) } } @@ -1227,13 +1231,21 @@ class Namer { typer: Typer => case pt: MethodOrPoly => 1 + extensionParamsCount(pt.resType) case _ => 0 val ddef = tpd.DefDef(forwarder.asTerm, prefss => { + val forwarderCtx = ctx.withOwner(forwarder) val (pathRefss, methRefss) = prefss.splitAt(extensionParamsCount(path.tpe.widen)) val ref = path.appliedToArgss(pathRefss).select(sym.asTerm) - ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, methRefss)) - .etaExpandCFT(using ctx.withOwner(forwarder)) + val rhs = ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, methRefss)) + .etaExpandCFT(using forwarderCtx) + if forwarder.isInlineMethod then + // Eagerly make the body inlineable. `registerInlineInfo` does this lazily + // but it does not get evaluated during typer as the forwarder we are creating + // is already typed. + val inlinableRhs = PrepareInlineable.makeInlineable(rhs)(using forwarderCtx) + PrepareInlineable.registerInlineInfo(forwarder, inlinableRhs)(using forwarderCtx) + inlinableRhs + else + rhs }) - if forwarder.isInlineMethod then - PrepareInlineable.registerInlineInfo(forwarder, ddef.rhs) buf += ddef.withSpan(span) if hasDefaults then foreachDefaultGetterOf(sym.asTerm, @@ -1249,7 +1261,7 @@ class Namer { typer: Typer => val reason = mbrs.map(canForward(_, alias)).collect { case CanForward.No(whyNot) => i"\n$path.$name cannot be exported because it $whyNot" }.headOption.getOrElse("") - report.error(i"""no eligible member $name at $path$reason""", ctx.source.atSpan(span)) + report.error(em"""no eligible member $name at $path$reason""", ctx.source.atSpan(span)) else targets += alias @@ -1314,7 +1326,7 @@ class Namer { typer: Typer => case _ => 0 if cmp == 0 then report.error( - ex"""Clashing exports: The exported + em"""Clashing exports: The exported | ${forwarder.rhs.symbol}: ${alt1.widen} |and ${forwarder1.rhs.symbol}: ${alt2.widen} |have the same signature after erasure 
and overloading resolution could not disambiguate.""", @@ -1335,7 +1347,7 @@ class Namer { typer: Typer => * * The idea is that this simulates the hypothetical case where export forwarders * are not generated and we treat an export instead more like an import where we - * expand the use site reference. Test cases in {neg,pos}/i14699.scala. + * expand the use site reference. Test cases in {neg,pos}/i14966.scala. * * @pre Forwarders with the same name are consecutive in `forwarders`. */ @@ -1437,7 +1449,7 @@ class Namer { typer: Typer => case mt: MethodType if cls.is(Case) && mt.isParamDependent => // See issue #8073 for background report.error( - i"""Implementation restriction: case classes cannot have dependencies between parameters""", + em"""Implementation restriction: case classes cannot have dependencies between parameters""", cls.srcPos) case _ => @@ -1453,27 +1465,41 @@ class Namer { typer: Typer => * only if parent type contains uninstantiated type parameters. */ def parentType(parent: untpd.Tree)(using Context): Type = - if (parent.isType) - typedAheadType(parent, AnyTypeConstructorProto).tpe - else { - val (core, targs) = stripApply(parent) match { + + def typedParentApplication(parent: untpd.Tree): Type = + val (core, targs) = stripApply(parent) match case TypeApply(core, targs) => (core, targs) case core => (core, Nil) - } - core match { + core match case Select(New(tpt), nme.CONSTRUCTOR) => val targs1 = targs map (typedAheadType(_)) val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes if (ptype.typeParams.isEmpty) ptype - else { + else if (denot.is(ModuleClass) && denot.sourceModule.isOneOf(GivenOrImplicit)) missingType(denot.symbol, "parent ")(using creationContext) fullyDefinedType(typedAheadExpr(parent).tpe, "class parent", parent.srcPos) - } case _ => UnspecifiedErrorType.assertingErrorsReported - } - } + + def typedParentType(tree: untpd.Tree): tpd.Tree = + val parentTpt = typer.typedType(parent, AnyTypeConstructorProto) + val ptpe = 
parentTpt.tpe + if ptpe.typeParams.nonEmpty + && ptpe.underlyingClassRef(refinementOK = false).exists + then + // Try to infer type parameters from a synthetic application. + // This might yield new info if implicit parameters are resolved. + // A test case is i16778.scala. + val app = untpd.Apply(untpd.Select(untpd.New(parentTpt), nme.CONSTRUCTOR), Nil) + typedParentApplication(app) + app.getAttachment(TypedAhead).getOrElse(parentTpt) + else + parentTpt + + if parent.isType then typedAhead(parent, typedParentType).tpe + else typedParentApplication(parent) + end parentType /** Check parent type tree `parent` for the following well-formedness conditions: * (1) It must be a class type with a stable prefix (@see checkClassTypeWithStablePrefix) @@ -1607,7 +1633,7 @@ class Namer { typer: Typer => case Some(ttree) => ttree case none => val ttree = typed(tree) - xtree.putAttachment(TypedAhead, ttree) + if !ttree.isEmpty then xtree.putAttachment(TypedAhead, ttree) ttree } } @@ -1618,15 +1644,14 @@ class Namer { typer: Typer => def typedAheadExpr(tree: Tree, pt: Type = WildcardType)(using Context): tpd.Tree = typedAhead(tree, typer.typedExpr(_, pt)) - def typedAheadAnnotation(tree: Tree)(using Context): tpd.Tree = - typedAheadExpr(tree, defn.AnnotationClass.typeRef) - - def typedAheadAnnotationClass(tree: Tree)(using Context): Symbol = tree match { + def typedAheadAnnotationClass(tree: Tree)(using Context): Symbol = tree match case Apply(fn, _) => typedAheadAnnotationClass(fn) case TypeApply(fn, _) => typedAheadAnnotationClass(fn) case Select(qual, nme.CONSTRUCTOR) => typedAheadAnnotationClass(qual) case New(tpt) => typedAheadType(tpt).tpe.classSymbol - } + case TypedSplice(_) => + val sym = tree.symbol + if sym.isConstructor then sym.owner else sym /** Enter and typecheck parameter list */ def completeParams(params: List[MemberDef])(using Context): Unit = { @@ -1690,8 +1715,10 @@ class Namer { typer: Typer => if !Config.checkLevelsOnConstraints then val hygienicType = 
TypeOps.avoid(rhsType, termParamss.flatten) if (!hygienicType.isValueType || !(hygienicType <:< tpt.tpe)) - report.error(i"return type ${tpt.tpe} of lambda cannot be made hygienic;\n" + - i"it is not a supertype of the hygienic type $hygienicType", mdef.srcPos) + report.error( + em"""return type ${tpt.tpe} of lambda cannot be made hygienic + |it is not a supertype of the hygienic type $hygienicType""", + mdef.srcPos) //println(i"lifting $rhsType over $termParamss -> $hygienicType = ${tpt.tpe}") //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe }) case _ => @@ -1863,7 +1890,7 @@ class Namer { typer: Typer => // so we must allow constraining its type parameters // compare with typedDefDef, see tests/pos/gadt-inference.scala rhsCtx.setFreshGADTBounds - rhsCtx.gadt.addToConstraint(typeParams) + rhsCtx.gadtState.addToConstraint(typeParams) } def typedAheadRhs(pt: Type) = @@ -1882,7 +1909,7 @@ class Namer { typer: Typer => // larger choice of overrides (see `default-getter.scala`). // For justification on the use of `@uncheckedVariance`, see // `default-getter-variance.scala`. 
- AnnotatedType(defaultTp, Annotation(defn.UncheckedVarianceAnnot)) + AnnotatedType(defaultTp, Annotation(defn.UncheckedVarianceAnnot, sym.span)) else // don't strip @uncheckedVariance annot for default getters TypeOps.simplify(tp.widenTermRefExpr, diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index b53ef28dc8f7..bde279c582e6 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -13,6 +13,8 @@ import Decorators._ import Uniques._ import inlines.Inlines import config.Printers.typr +import Inferencing.* +import ErrorReporting.* import util.SourceFile import TypeComparer.necessarySubType @@ -131,10 +133,18 @@ object ProtoTypes { /** A class marking ignored prototypes that can be revealed by `deepenProto` */ abstract case class IgnoredProto(ignored: Type) extends CachedGroundType with MatchAlways: + private var myWasDeepened = false override def revealIgnored = ignored - override def deepenProto(using Context): Type = ignored + override def deepenProto(using Context): Type = + myWasDeepened = true + ignored override def deepenProtoTrans(using Context): Type = ignored.deepenProtoTrans + /** Did someone look inside via deepenProto? Used for error deagniostics + * to give a more extensive expected type. 
+ */ + def wasDeepened: Boolean = myWasDeepened + override def computeHash(bs: Hashable.Binders): Int = doHash(bs, ignored) override def eql(that: Type): Boolean = that match @@ -287,6 +297,8 @@ object ProtoTypes { */ @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true) + @sharable object SingletonTypeProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true) + /** A prototype for selections in pattern constructors */ class UnapplySelectionProto(name: Name) extends SelectionProto(name, WildcardType, NoViewsAllowed, true) @@ -360,7 +372,7 @@ object ProtoTypes { private def isUndefined(tp: Type): Boolean = tp match { case _: WildcardType => true - case defn.FunctionOf(args, result, _, _) => args.exists(isUndefined) || isUndefined(result) + case defn.FunctionOf(args, result, _) => args.exists(isUndefined) || isUndefined(result) case _ => false } @@ -482,7 +494,21 @@ object ProtoTypes { val targ = cacheTypedArg(arg, typer.typedUnadapted(_, wideFormal, locked)(using argCtx), force = true) - typer.adapt(targ, wideFormal, locked) + val targ1 = typer.adapt(targ, wideFormal, locked) + if wideFormal eq formal then targ1 + else checkNoWildcardCaptureForCBN(targ1) + } + + def checkNoWildcardCaptureForCBN(targ1: Tree)(using Context): Tree = { + if hasCaptureConversionArg(targ1.tpe) then + val tp = stripCast(targ1).tpe + errorTree(targ1, + em"""argument for by-name parameter is not a value + |and contains wildcard arguments: $tp + | + |Assign it to a val and pass that instead. 
+ |""") + else targ1 } /** The type of the argument `arg`, or `NoType` if `arg` has not been typed before @@ -661,10 +687,12 @@ object ProtoTypes { * * [] _ */ - @sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways + @sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways: + override def toString = "AnyFunctionProto" /** A prototype for type constructors that are followed by a type application */ - @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways + @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways: + override def toString = "AnyTypeConstructorProto" extension (pt: Type) def isExtensionApplyProto: Boolean = pt match @@ -814,7 +842,7 @@ object ProtoTypes { normalize(et.resultType, pt) case wtp => val iftp = defn.asContextFunctionType(wtp) - if iftp.exists && followIFT then normalize(iftp.dropDependentRefinement.argInfos.last, pt) + if iftp.exists && followIFT then normalize(iftp.functionArgInfos.last, pt) else tp } } @@ -936,8 +964,8 @@ object ProtoTypes { object dummyTreeOfType { def apply(tp: Type)(implicit src: SourceFile): Tree = untpd.Literal(Constant(null)) withTypeUnchecked tp - def unapply(tree: untpd.Tree): Option[Type] = tree match { - case Literal(Constant(null)) => Some(tree.typeOpt) + def unapply(tree: untpd.Tree): Option[Type] = untpd.unsplice(tree) match { + case tree @ Literal(Constant(null)) => Some(tree.typeOpt) case _ => None } } diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index fa29f450be2a..8473bd168bc5 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -11,11 +11,12 @@ import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.NameKinds.PatMatGivenVarName import dotty.tools.dotc.core.Names._ -import 
dotty.tools.dotc.core.StagingContext._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ import dotty.tools.dotc.inlines.PrepareInlineable +import dotty.tools.dotc.staging.QuoteContext.* +import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.typer.Implicits._ import dotty.tools.dotc.typer.Inferencing._ @@ -54,7 +55,7 @@ trait QuotesAndSplices { val msg = em"""Quoted types `'[..]` can only be used in patterns. | |Hint: To get a scala.quoted.Type[T] use scala.quoted.Type.of[T] instead. - |""".stripMargin + |""" report.error(msg, tree.srcPos) EmptyTree else @@ -87,11 +88,11 @@ trait QuotesAndSplices { ref(defn.QuotedRuntime_exprSplice).appliedToType(argType).appliedTo(pat) } else { - report.error(i"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.expr.srcPos) + report.error(em"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.expr.srcPos) tree.withType(UnspecifiedErrorType) } else { - if (StagingContext.level == 0) { + if (level == 0) { // Mark the first inline method from the context as a macro def markAsMacro(c: Context): Unit = if (c.owner eq c.outer.owner) markAsMacro(c.outer) @@ -123,7 +124,7 @@ trait QuotesAndSplices { assert(ctx.mode.is(Mode.QuotedPattern)) val untpd.Apply(splice: untpd.Splice, args) = tree: @unchecked if !isFullyDefined(pt, ForceDegree.flipBottom) then - report.error(i"Type must be fully defined.", splice.srcPos) + report.error(em"Type must be fully defined.", splice.srcPos) tree.withType(UnspecifiedErrorType) else if splice.isInBraces then // ${x}(...) 
match an application val typedArgs = args.map(arg => typedExpr(arg)) @@ -172,10 +173,10 @@ trait QuotesAndSplices { report.error("Splice ${...} outside quotes '{...} or inline method", tree.srcPos) else if (level < 0) report.error( - s"""Splice $${...} at level $level. - | - |Inline method may contain a splice at level 0 but the contents of this splice cannot have a splice. - |""".stripMargin, tree.srcPos + em"""Splice $${...} at level $level. + | + |Inline method may contain a splice at level 0 but the contents of this splice cannot have a splice. + |""", tree.srcPos ) /** Split a typed quoted pattern is split into its type bindings, pattern expression and inner patterns. @@ -249,7 +250,7 @@ trait QuotesAndSplices { val pat1 = if (patType eq patType1) pat else pat.withType(patType1) patBuf += pat1 } - case Select(pat, _) if tree.symbol.isTypeSplice => + case Select(pat: Bind, _) if tree.symbol.isTypeSplice => val sym = tree.tpe.dealias.typeSymbol if sym.exists then val tdef = TypeDef(sym.asType).withSpan(sym.span) @@ -263,7 +264,7 @@ trait QuotesAndSplices { transformTypeBindingTypeDef(PatMatGivenVarName.fresh(tdef.name.toTermName), tdef, typePatBuf) else if tdef.symbol.isClass then val kind = if tdef.symbol.is(Module) then "objects" else "classes" - report.error("Implementation restriction: cannot match " + kind, tree.srcPos) + report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) EmptyTree else super.transform(tree) @@ -364,7 +365,7 @@ trait QuotesAndSplices { * * ``` * case scala.internal.quoted.Expr.unapply[ - * Tuple1[t @ _], // Type binging definition + * KList[t @ _, KNil], // Type binging definition * Tuple2[Type[t], Expr[List[t]]] // Typing the result of the pattern match * ]( * Tuple2.unapply @@ -411,7 +412,7 @@ trait QuotesAndSplices { val replaceBindings = new ReplaceBindings val patType = defn.tupleType(splices.tpes.map(tpe => replaceBindings(tpe.widen))) - val typeBindingsTuple = tpd.tupleTypeTree(typeBindings.values.toList) + 
val typeBindingsTuple = tpd.hkNestedPairsTypeTree(typeBindings.values.toList) val replaceBindingsInTree = new TreeMap { private var bindMap = Map.empty[Symbol, Symbol] diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 7099234c80e1..c64f541fd811 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -71,7 +71,7 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking promote(tree) override def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = - promote(TypeTree(tree.tpe).withSpan(tree.span)) + promote(TypeTree(tree.typeOpt).withSpan(tree.span)) override def typedExport(exp: untpd.Export)(using Context): Export = promote(exp) @@ -87,8 +87,8 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking // retract PatternOrTypeBits like in typedExpr withoutMode(Mode.PatternOrTypeBits)(typedUnadapted(tree.fun, AnyFunctionProto)) val implicits1 = tree.implicits.map(typedExpr(_)) - val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.tpe)) - untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.tpe) + val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.typeOpt)) + untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.typeOpt) } override def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = @@ -124,12 +124,10 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = try super.typedUnadapted(tree, pt, locked) - catch { - case NonFatal(ex) => - if ctx.phase != Phases.typerPhase && ctx.phase != Phases.inliningPhase then - println(i"exception while typing $tree of class ${tree.getClass} # ${tree.uniqueId}") - throw ex - } + catch case NonFatal(ex) if ctx.phase != 
Phases.typerPhase && ctx.phase != Phases.inliningPhase && !ctx.run.enrichedErrorMessage => + val treeStr = tree.show(using ctx.withPhase(ctx.phase.prevMega)) + println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) + throw ex override def inlineExpansion(mdef: DefDef)(using Context): List[Tree] = mdef :: Nil diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 94eacca5c7db..4ca00ce6366f 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -15,8 +15,8 @@ import config.Printers.{checks, noPrinter} import Decorators._ import OverridingPairs.isOverridingPair import typer.ErrorReporting._ -import config.Feature.{warnOnMigration, migrateTo3} -import config.SourceVersion.`3.0` +import config.Feature.{warnOnMigration, migrateTo3, sourceVersion} +import config.SourceVersion.{`3.0`, `future`} import config.Printers.refcheck import reporting._ import Constants.Constant @@ -58,11 +58,9 @@ object RefChecks { // constructors of different classes are allowed to have defaults if (haveDefaults.exists(x => !x.isConstructor) || owners.distinct.size < haveDefaults.size) report.error( - "in " + clazz + - ", multiple overloaded alternatives of " + haveDefaults.head + - " define default arguments" + ( - if (owners.forall(_ == clazz)) "." - else ".\nThe members with defaults are defined in " + owners.map(_.showLocated).mkString("", " and ", ".")), + em"in $clazz, multiple overloaded alternatives of ${haveDefaults.head} define default arguments${ + if owners.forall(_ == clazz) then "." 
+ else i".\nThe members with defaults are defined in ${owners.map(_.showLocated).mkString("", " and ", ".")}"}", clazz.srcPos) } } @@ -91,24 +89,39 @@ object RefChecks { cls.thisType } + /** - Check that self type of `cls` conforms to self types of all `parents` as seen from + * `cls.thisType` + * - If self type of `cls` is explicit, check that it conforms to the self types + * of all its class symbols. + * @param deep If true and a self type of a parent is not given explicitly, recurse to + * check against the parents of the parent. This is needed when capture checking, + * since we assume (& check) that the capture set of an inferred self type + * is the intersection of the capture sets of all its parents + */ + def checkSelfAgainstParents(cls: ClassSymbol, parents: List[Symbol])(using Context): Unit = + withMode(Mode.CheckBoundsOrSelfType) { + val cinfo = cls.classInfo + + def checkSelfConforms(other: ClassSymbol) = + val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) + if otherSelf.exists then + if !(cinfo.selfType <:< otherSelf) then + report.error(DoesNotConformToSelfType("illegal inheritance", cinfo.selfType, cls, otherSelf, "parent", other), + cls.srcPos) + + for psym <- parents do + checkSelfConforms(psym.asClass) + } + end checkSelfAgainstParents + /** Check that self type of this class conforms to self types of parents * and required classes. Also check that only `enum` constructs extend * `java.lang.Enum` and no user-written class extends ContextFunctionN. 
*/ def checkParents(cls: Symbol, parentTrees: List[Tree])(using Context): Unit = cls.info match { case cinfo: ClassInfo => - def checkSelfConforms(other: ClassSymbol, category: String, relation: String) = { - val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) - if otherSelf.exists && !(cinfo.selfType <:< otherSelf) then - report.error(DoesNotConformToSelfType(category, cinfo.selfType, cls, otherSelf, relation, other), - cls.srcPos) - } val psyms = cls.asClass.parentSyms - for (psym <- psyms) - checkSelfConforms(psym.asClass, "illegal inheritance", "parent") - for reqd <- cinfo.cls.givenSelfType.classSymbols do - if reqd != cls then - checkSelfConforms(reqd, "missing requirement", "required") + checkSelfAgainstParents(cls.asClass, psyms) def isClassExtendingJavaEnum = !cls.isOneOf(Enum | Trait) && psyms.contains(defn.JavaEnumClass) @@ -221,9 +234,16 @@ object RefChecks { && inLinearizationOrder(sym1, sym2, parent) && !sym2.is(AbsOverride) - def checkAll(checkOverride: (Symbol, Symbol) => Unit) = + // Checks the subtype relationship tp1 <:< tp2. + // It is passed to the `checkOverride` operation in `checkAll`, to be used for + // compatibility checking. + def checkSubType(tp1: Type, tp2: Type)(using Context): Boolean = tp1 frozen_<:< tp2 + + private val subtypeChecker: (Type, Type) => Context ?=> Boolean = this.checkSubType + + def checkAll(checkOverride: ((Type, Type) => Context ?=> Boolean, Symbol, Symbol) => Unit) = while hasNext do - checkOverride(overriding, overridden) + checkOverride(subtypeChecker, overriding, overridden) next() // The OverridingPairs cursor does assume that concrete overrides abstract @@ -237,7 +257,7 @@ object RefChecks { if dcl.is(Deferred) then for other <- dcl.allOverriddenSymbols do if !other.is(Deferred) then - checkOverride(dcl, other) + checkOverride(checkSubType, dcl, other) end checkAll end OverridingPairsChecker @@ -264,6 +284,8 @@ object RefChecks { * 1.10. 
If O is inline (and deferred, otherwise O would be final), M must be inline * 1.11. If O is a Scala-2 macro, M must be a Scala-2 macro. * 1.12. If O is non-experimental, M must be non-experimental. + * 1.13 Under -source future, if O is a val parameter, M must be a val parameter + * that passes its value on to O. * 2. Check that only abstract classes have deferred members * 3. Check that concrete classes do not have deferred definitions * that are not implemented in a subclass. @@ -272,8 +294,11 @@ object RefChecks { * TODO check that classes are not overridden * TODO This still needs to be cleaned up; the current version is a straight port of what was there * before, but it looks too complicated and method bodies are far too large. + * + * @param makeOverridePairsChecker A function for creating a OverridePairsChecker instance + * from the class symbol and the self type */ - def checkAllOverrides(clazz: ClassSymbol)(using Context): Unit = { + def checkAllOverrides(clazz: ClassSymbol, makeOverridingPairsChecker: ((ClassSymbol, Type) => Context ?=> OverridingPairsChecker) | Null = null)(using Context): Unit = { val self = clazz.thisType val upwardsSelf = upwardsThisType(clazz) var hasErrors = false @@ -299,25 +324,22 @@ object RefChecks { report.error(msg.append(othersMsg), clazz.srcPos) } - def infoString(sym: Symbol) = infoString0(sym, sym.owner != clazz) - def infoStringWithLocation(sym: Symbol) = infoString0(sym, true) - - def infoString0(sym: Symbol, showLocation: Boolean) = { - val sym1 = sym.underlyingSymbol - def info = self.memberInfo(sym1) - val infoStr = - if (sym1.isAliasType) i", which equals ${info.bounds.hi}" - else if (sym1.isAbstractOrParamType && info != TypeBounds.empty) i" with bounds$info" - else if (sym1.is(Module)) "" - else if (sym1.isTerm) i" of type $info" - else "" - i"${if (showLocation) sym1.showLocated else sym1}$infoStr" - } + def infoString(sym: Symbol) = + err.infoString(sym, self, showLocation = sym.owner != clazz) + def 
infoStringWithLocation(sym: Symbol) = + err.infoString(sym, self, showLocation = true) + + def isInheritedAccessor(mbr: Symbol, other: Symbol): Boolean = + mbr.is(ParamAccessor) + && { + val next = ParamForwarding.inheritedAccessor(mbr) + next == other || isInheritedAccessor(next, other) + } /* Check that all conditions for overriding `other` by `member` - * of class `clazz` are met. - */ - def checkOverride(member: Symbol, other: Symbol): Unit = + * of class `clazz` are met. + */ + def checkOverride(checkSubType: (Type, Type) => Context ?=> Boolean, member: Symbol, other: Symbol): Unit = def memberTp(self: Type) = if (member.isClass) TypeAlias(member.typeRef.EtaExpand(member.typeParams)) else self.memberInfo(member) @@ -327,27 +349,17 @@ object RefChecks { def noErrorType = !memberTp(self).isErroneous && !otherTp(self).isErroneous - def overrideErrorMsg(msg: String, compareTypes: Boolean = false): Message = { - val isConcreteOverAbstract = - (other.owner isSubClass member.owner) && other.is(Deferred) && !member.is(Deferred) - val addendum = - if isConcreteOverAbstract then - ";\n (Note that %s is abstract,\n and is therefore overridden by concrete %s)".format( - infoStringWithLocation(other), - infoStringWithLocation(member)) - else "" - val fullMsg = - s"error overriding ${infoStringWithLocation(other)};\n ${infoString(member)} $msg$addendum" - if compareTypes then OverrideTypeMismatchError(fullMsg, memberTp(self), otherTp(self)) - else OverrideError(fullMsg) - } + def overrideErrorMsg(core: Context ?=> String, compareTypes: Boolean = false): Message = + val (mtp, otp) = if compareTypes then (memberTp(self), otherTp(self)) else (NoType, NoType) + OverrideError(core, self, member, other, mtp, otp) def compatTypes(memberTp: Type, otherTp: Type): Boolean = try isOverridingPair(member, memberTp, other, otherTp, fallBack = warnOnMigration( overrideErrorMsg("no longer has compatible type"), - (if (member.owner == clazz) member else clazz).srcPos, version = `3.0`)) + 
(if (member.owner == clazz) member else clazz).srcPos, version = `3.0`), + isSubType = checkSubType) catch case ex: MissingType => // can happen when called with upwardsSelf as qualifier of memberTp and otherTp, // because in that case we might access types that are not members of the qualifier. @@ -359,7 +371,16 @@ object RefChecks { * Type members are always assumed to match. */ def trueMatch: Boolean = - member.isType || memberTp(self).matches(otherTp(self)) + member.isType || withMode(Mode.IgnoreCaptures) { + // `matches` does not perform box adaptation so the result here would be + // spurious during capture checking. + // + // Instead of parameterizing `matches` with the function for subtype checking + // with box adaptation, we simply ignore capture annotations here. + // This should be safe since the compatibility under box adaptation is already + // checked. + memberTp(self).matches(otherTp(self)) + } def emitOverrideError(fullmsg: Message) = if (!(hasErrors && member.is(Synthetic) && member.is(Module))) { @@ -376,7 +397,7 @@ object RefChecks { def overrideDeprecation(what: String, member: Symbol, other: Symbol, fix: String): Unit = report.deprecationWarning( - s"overriding $what${infoStringWithLocation(other)} is deprecated;\n ${infoString(member)} should be $fix.", + em"overriding $what${infoStringWithLocation(other)} is deprecated;\n ${infoString(member)} should be $fix.", if member.owner == clazz then member.srcPos else clazz.srcPos) def autoOverride(sym: Symbol) = @@ -462,7 +483,7 @@ object RefChecks { if (autoOverride(member) || other.owner.isAllOf(JavaInterface) && warnOnMigration( - "`override` modifier required when a Java 8 default method is re-implemented", + em"`override` modifier required when a Java 8 default method is re-implemented", member.srcPos, version = `3.0`)) member.setFlag(Override) else if (member.isType && self.memberInfo(member) =:= self.memberInfo(other)) @@ -494,7 +515,7 @@ object RefChecks { else if (member.is(ModuleVal) && 
!other.isRealMethod && !other.isOneOf(DeferredOrLazy)) overrideError("may not override a concrete non-lazy value") else if (member.is(Lazy, butNot = Module) && !other.isRealMethod && !other.is(Lazy) && - !warnOnMigration(overrideErrorMsg("may not override a non-lazy value"), member.srcPos, version = `3.0`)) + !warnOnMigration(overrideErrorMsg("may not override a non-lazy value"), member.srcPos, version = `3.0`)) overrideError("may not override a non-lazy value") else if (other.is(Lazy) && !other.isRealMethod && !member.is(Lazy)) overrideError("must be declared lazy to override a lazy value") @@ -514,13 +535,21 @@ object RefChecks { overrideError(i"needs to be declared with @targetName(${"\""}${other.targetName}${"\""}) so that external names match") else overrideError("cannot have a @targetName annotation since external names would be different") + else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.13) + if sourceVersion.isAtLeast(`future`) then + overrideError(i"cannot override val parameter ${other.showLocated}") + else + report.deprecationWarning( + em"overriding val parameter ${other.showLocated} is deprecated, will be illegal in a future version", + member.srcPos) else if !other.isExperimental && member.hasAnnotation(defn.ExperimentalAnnot) then // (1.12) overrideError("may not override non-experimental member") else if other.hasAnnotation(defn.DeprecatedOverridingAnnot) then overrideDeprecation("", member, other, "removed or renamed") end checkOverride - OverridingPairsChecker(clazz, self).checkAll(checkOverride) + val checker = if makeOverridingPairsChecker == null then OverridingPairsChecker(clazz, self) else makeOverridingPairsChecker(clazz, self) + checker.checkAll(checkOverride) printMixinOverrideErrors() // Verifying a concrete class has nothing unimplemented. 
@@ -528,7 +557,7 @@ object RefChecks { val abstractErrors = new mutable.ListBuffer[String] def abstractErrorMessage = // a little formatting polish - if (abstractErrors.size <= 2) abstractErrors mkString " " + if (abstractErrors.size <= 2) abstractErrors.mkString(" ") else abstractErrors.tail.mkString(abstractErrors.head + ":\n", "\n", "") def abstractClassError(mustBeMixin: Boolean, msg: String): Unit = { @@ -564,7 +593,7 @@ object RefChecks { clazz.nonPrivateMembersNamed(mbr.name) .filterWithPredicate( impl => isConcrete(impl.symbol) - && mbrDenot.matchesLoosely(impl, alwaysCompareTypes = true)) + && withMode(Mode.IgnoreCaptures)(mbrDenot.matchesLoosely(impl, alwaysCompareTypes = true))) .exists /** The term symbols in this class and its baseclasses that are @@ -711,7 +740,7 @@ object RefChecks { def checkNoAbstractDecls(bc: Symbol): Unit = { for (decl <- bc.info.decls) if (decl.is(Deferred)) { - val impl = decl.matchingMember(clazz.thisType) + val impl = withMode(Mode.IgnoreCaptures)(decl.matchingMember(clazz.thisType)) if (impl == NoSymbol || decl.owner.isSubClass(impl.owner)) && !ignoreDeferred(decl) then @@ -758,17 +787,19 @@ object RefChecks { // For each member, check that the type of its symbol, as seen from `self` // can override the info of this member - for (name <- membersToCheck) - for (mbrd <- self.member(name).alternatives) { - val mbr = mbrd.symbol - val mbrType = mbr.info.asSeenFrom(self, mbr.owner) - if (!mbrType.overrides(mbrd.info, relaxedCheck = false, matchLoosely = true)) - report.errorOrMigrationWarning( - em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz - | its type $mbrType - | does not conform to ${mbrd.info}""", - (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) + withMode(Mode.IgnoreCaptures) { + for (name <- membersToCheck) + for (mbrd <- self.member(name).alternatives) { + val mbr = mbrd.symbol + val mbrType = mbr.info.asSeenFrom(self, mbr.owner) + if (!mbrType.overrides(mbrd.info, relaxedCheck 
= false, matchLoosely = true)) + report.errorOrMigrationWarning( + em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz + | its type $mbrType + | does not conform to ${mbrd.info}""", + (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) } + } } /** Check that inheriting a case class does not constitute a variant refinement @@ -780,7 +811,7 @@ object RefChecks { for (baseCls <- caseCls.info.baseClasses.tail) if (baseCls.typeParams.exists(_.paramVarianceSign != 0)) for (problem <- variantInheritanceProblems(baseCls, caseCls, "non-variant", "case ")) - report.errorOrMigrationWarning(problem(), clazz.srcPos, from = `3.0`) + report.errorOrMigrationWarning(problem, clazz.srcPos, from = `3.0`) checkNoAbstractMembers() if (abstractErrors.isEmpty) checkNoAbstractDecls(clazz) @@ -811,7 +842,7 @@ object RefChecks { if cls.paramAccessors.nonEmpty && !mixins.contains(cls) problem <- variantInheritanceProblems(cls, clazz.asClass.superClass, "parameterized", "super") } - report.error(problem(), clazz.srcPos) + report.error(problem, clazz.srcPos) } checkParameterizedTraitsOK() @@ -825,13 +856,13 @@ object RefChecks { * Return an optional by name error message if this test fails. */ def variantInheritanceProblems( - baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[() => String] = { + baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[Message] = { val superBT = self.baseType(middle) val thisBT = self.baseType(baseCls) val combinedBT = superBT.baseType(baseCls) if (combinedBT =:= thisBT) None // ok else - Some(() => + Some( em"""illegal inheritance: $clazz inherits conflicting instances of $baseStr base $baseCls. 
| | Direct basetype: $thisBT @@ -928,7 +959,7 @@ object RefChecks { for bc <- cls.baseClasses.tail do val other = sym.matchingDecl(bc, cls.thisType) if other.exists then - report.error(i"private $sym cannot override ${other.showLocated}", sym.srcPos) + report.error(em"private $sym cannot override ${other.showLocated}", sym.srcPos) end checkNoPrivateOverrides /** Check that unary method definition do not receive parameters. @@ -1132,8 +1163,7 @@ class RefChecks extends MiniPhase { thisPhase => checkAllOverrides(cls) checkImplicitNotFoundAnnotation.template(cls.classDenot) tree - } - catch { + } catch { case ex: TypeError => report.error(ex, tree.srcPos) tree diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index e3f5382ecad7..103961b68c29 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -28,26 +28,31 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): private type SpecialHandlers = List[(ClassSymbol, SpecialHandler)] val synthesizedClassTag: SpecialHandler = (formal, span) => + def instArg(tp: Type): Type = tp.stripTypeVar match + // Special case to avoid instantiating `Int & S` to `Int & Nothing` in + // i16328.scala. The intersection comes from an earlier instantiation + // to an upper bound. + // The dual situation with unions is harder to trigger because lower + // bounds are usually widened during instantiation. 
+ case tp: AndOrType if tp.tp1 =:= tp.tp2 => + instArg(tp.tp1) + case _ => + if isFullyDefined(tp, ForceDegree.all) then tp + else NoType // this happens in tests/neg/i15372.scala + val tag = formal.argInfos match - case arg :: Nil if isFullyDefined(arg, ForceDegree.all) => - arg match + case arg :: Nil => + instArg(arg) match case defn.ArrayOf(elemTp) => val etag = typer.inferImplicitArg(defn.ClassTagClass.typeRef.appliedTo(elemTp), span) if etag.tpe.isError then EmptyTree else etag.select(nme.wrap) - case tp if hasStableErasure(tp) && !defn.isBottomClassAfterErasure(tp.typeSymbol) => + case tp if hasStableErasure(tp) && !tp.isBottomTypeAfterErasure => val sym = tp.typeSymbol val classTagModul = ref(defn.ClassTagModule) if defn.SpecialClassTagClasses.contains(sym) then classTagModul.select(sym.name.toTermName).withSpan(span) else - def clsOfType(tp: Type): Type = tp.dealias.underlyingMatchType match - case matchTp: MatchType => - matchTp.alternatives.map(clsOfType) match - case ct1 :: cts if cts.forall(ct1 == _) => ct1 - case _ => NoType - case _ => - escapeJavaArray(erasure(tp)) - val ctype = clsOfType(tp) + val ctype = escapeJavaArray(erasure(tp)) if ctype.exists then classTagModul.select(nme.apply) .appliedToType(tp) @@ -98,12 +103,12 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): def functionTypeEqual(baseFun: Type, actualArgs: List[Type], actualRet: Type, expected: Type) = expected =:= defn.FunctionOf(actualArgs, actualRet, - defn.isContextFunctionType(baseFun), defn.isErasedFunctionType(baseFun)) + defn.isContextFunctionType(baseFun)) val arity: Int = - if defn.isErasedFunctionType(fun) || defn.isErasedFunctionType(fun) then -1 // TODO support? + if defn.isErasedFunctionType(fun) then -1 // TODO support? else if defn.isFunctionType(fun) then // TupledFunction[(...) => R, ?] 
- fun.dropDependentRefinement.dealias.argInfos match + fun.functionArgInfos match case funArgs :+ funRet if functionTypeEqual(fun, defn.tupleType(funArgs) :: Nil, funRet, tupled) => // TupledFunction[(...funArgs...) => funRet, ?] @@ -111,7 +116,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case _ => -1 else if defn.isFunctionType(tupled) then // TupledFunction[?, (...) => R] - tupled.dropDependentRefinement.dealias.argInfos match + tupled.functionArgInfos match case tupledArgs :: funRet :: Nil => defn.tupleTypes(tupledArgs.dealias) match case Some(funArgs) if functionTypeEqual(tupled, funArgs, funRet, fun) => @@ -476,8 +481,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val elemLabels = cls.children.map(c => ConstantType(Constant(c.name.toString))) def internalError(msg: => String)(using Context): Unit = - report.error(i"""Internal error when synthesizing sum mirror for $cls: - |$msg""".stripMargin, ctx.source.atSpan(span)) + report.error(em"""Internal error when synthesizing sum mirror for $cls: + |$msg""", ctx.source.atSpan(span)) def childPrefix(child: Symbol)(using Context): Type = val symPre = TypeOps.childPrefix(pre, cls, child) @@ -691,10 +696,11 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val manifest = synthesize(fullyDefinedType(arg, "Manifest argument", ctx.source.atSpan(span)), kind, topLevel = true) if manifest != EmptyTree then report.deprecationWarning( - i"""Compiler synthesis of Manifest and OptManifest is deprecated, instead - |replace with the type `scala.reflect.ClassTag[$arg]`. - |Alternatively, consider using the new metaprogramming features of Scala 3, - |see https://docs.scala-lang.org/scala3/reference/metaprogramming.html""", ctx.source.atSpan(span)) + em"""Compiler synthesis of Manifest and OptManifest is deprecated, instead + |replace with the type `scala.reflect.ClassTag[$arg]`. 
+ |Alternatively, consider using the new metaprogramming features of Scala 3, + |see https://docs.scala-lang.org/scala3/reference/metaprogramming.html""", + ctx.source.atSpan(span)) withNoErrors(manifest) case _ => EmptyTreeNoError diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index c8b01b3407b7..98e9cb638c17 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -31,8 +31,9 @@ trait TypeAssigner { c case _ => report.error( - if (qual.isEmpty) tree.show + " can be used only in a class, object, or template" - else qual.show + " is not an enclosing class", tree.srcPos) + if qual.isEmpty then em"$tree can be used only in a class, object, or template" + else em"$qual is not an enclosing class", + tree.srcPos) NoSymbol } } @@ -127,7 +128,7 @@ trait TypeAssigner { def arrayElemType = qual1.tpe.widen match case JavaArrayType(elemtp) => elemtp case qualType => - report.error("Expected Array but was " + qualType.show, tree.srcPos) + report.error(em"Expected Array but was $qualType", tree.srcPos) defn.NothingType val name = tree.name @@ -167,26 +168,13 @@ trait TypeAssigner { case _ => false def addendum = err.selectErrorAddendum(tree, qual, qualType, importSuggestionAddendum, foundWithoutNull) val msg: Message = - if tree.name == nme.CONSTRUCTOR then ex"$qualType does not have a constructor" + if tree.name == nme.CONSTRUCTOR then em"$qualType does not have a constructor" else NotAMember(qualType, tree.name, kind, addendum) errorType(msg, tree.srcPos) def inaccessibleErrorType(tpe: NamedType, superAccess: Boolean, pos: SrcPos)(using Context): Type = - val pre = tpe.prefix - val name = tpe.name - val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists) - val whatCanNot = alts match - case Nil => - em"$name cannot" - case sym :: Nil => - em"${if (sym.owner == pre.typeSymbol) sym.show else sym.showLocated} cannot" - 
case _ => - em"none of the overloaded alternatives named $name can" - val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else "" - val whyNot = new StringBuffer - alts.foreach(_.isAccessibleFrom(pre, superAccess, whyNot)) if tpe.isError then tpe - else errorType(ex"$whatCanNot be accessed as a member of $pre$where.$whyNot", pos) + else errorType(CannotBeAccessed(tpe, superAccess), pos) def processAppliedType(tree: untpd.Tree, tp: Type)(using Context): Type = tp match case AppliedType(tycon, args) => @@ -238,7 +226,7 @@ trait TypeAssigner { val cls = qualifyingClass(tree, tree.qual.name, packageOK = false) tree.withType( if (cls.isClass) cls.thisType - else errorType("not a legal qualifying class for this", tree.srcPos)) + else errorType(em"not a legal qualifying class for this", tree.srcPos)) } def superType(qualType: Type, mix: untpd.Ident, mixinClass: Symbol, pos: SrcPos)(using Context) = @@ -252,10 +240,10 @@ trait TypeAssigner { case Nil => errorType(SuperQualMustBeParent(mix, cls), pos) case p :: q :: _ => - errorType("ambiguous parent class qualifier", pos) + errorType(em"ambiguous parent class qualifier", pos) } val owntype = - if (mixinClass.exists) mixinClass.appliedRef + if (mixinClass.exists) mixinClass.typeRef else if (!mix.isEmpty) findMixinSuper(cls.info) else if (ctx.erasedTypes) cls.info.firstParent.typeConstructor else { @@ -291,25 +279,25 @@ trait TypeAssigner { def safeSubstMethodParams(mt: MethodType, argTypes: List[Type])(using Context): Type = if mt.isResultDependent then safeSubstParams(mt.resultType, mt.paramRefs, argTypes) - else if mt.isCaptureDependent then mt.resultType.substParams(mt, argTypes) else mt.resultType def assignType(tree: untpd.Apply, fn: Tree, args: List[Tree])(using Context): Apply = { val ownType = fn.tpe.widen match { case fntpe: MethodType => - if (fntpe.paramInfos.hasSameLengthAs(args) || ctx.phase.prev.relaxedTyping) - safeSubstMethodParams(fntpe, args.tpes) + if 
fntpe.paramInfos.hasSameLengthAs(args) || ctx.phase.prev.relaxedTyping then + if fntpe.isResultDependent then safeSubstMethodParams(fntpe, args.tpes) + else fntpe.resultType // fast path optimization else - errorType(i"wrong number of arguments at ${ctx.phase.prev} for $fntpe: ${fn.tpe}, expected: ${fntpe.paramInfos.length}, found: ${args.length}", tree.srcPos) + errorType(em"wrong number of arguments at ${ctx.phase.prev} for $fntpe: ${fn.tpe}, expected: ${fntpe.paramInfos.length}, found: ${args.length}", tree.srcPos) case t => if (ctx.settings.Ydebug.value) new FatalError("").printStackTrace() - errorType(err.takesNoParamsStr(fn, ""), tree.srcPos) + errorType(err.takesNoParamsMsg(fn, ""), tree.srcPos) } ConstFold.Apply(tree.withType(ownType)) } def assignType(tree: untpd.TypeApply, fn: Tree, args: List[Tree])(using Context): TypeApply = { - def fail = tree.withType(errorType(err.takesNoParamsStr(fn, "type "), tree.srcPos)) + def fail = tree.withType(errorType(err.takesNoParamsMsg(fn, "type "), tree.srcPos)) ConstFold(fn.tpe.widen match { case pt: TypeLambda => tree.withType { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 17271618cf09..16b256e69059 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -44,6 +44,7 @@ import config.Feature import config.Feature.{sourceVersion, migrateTo3} import config.SourceVersion._ import rewrites.Rewrites.patch +import staging.StagingLevel import transform.SymUtils._ import transform.TypeUtils._ import reporting._ @@ -73,12 +74,6 @@ object Typer { /** An attachment for GADT constraints that were inferred for a pattern. */ val InferredGadtConstraints = new Property.StickyKey[core.GadtConstraint] - /** A context property that indicates the owner of any expressions to be typed in the context - * if that owner is different from the context's owner. 
Typically, a context with a class - * as owner would have a local dummy as ExprOwner value. - */ - private val ExprOwner = new Property.Key[Symbol] - /** An attachment on a Select node with an `apply` field indicating that the `apply` * was inserted by the Typer. */ @@ -250,15 +245,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer imp.importSym.info match case ImportType(expr) => val pre = expr.tpe - var denot = pre.memberBasedOnFlags(name, required, excluded) + val denot0 = pre.memberBasedOnFlags(name, required, excluded) .accessibleFrom(pre)(using refctx) // Pass refctx so that any errors are reported in the context of the // reference instead of the context of the import scope - if denot.exists then - if checkBounds then - denot = denot.filterWithPredicate { mbr => - mbr.matchesImportBound(if mbr.symbol.is(Given) then imp.givenBound else imp.wildcardBound) - } + if denot0.exists then + val denot = + if checkBounds then + denot0.filterWithPredicate { mbr => + mbr.matchesImportBound(if mbr.symbol.is(Given) then imp.givenBound else imp.wildcardBound) + } + else denot0 def isScalaJsPseudoUnion = denot.name == tpnme.raw.BAR && ctx.settings.scalajs.value && denot.symbol == JSDefinitions.jsdefn.PseudoUnionClass // Just like Scala2Unpickler reinterprets Scala.js pseudo-unions @@ -376,6 +373,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case denot => !denot.hasAltWith(isCurrent) def checkNoOuterDefs(denot: Denotation, last: Context, prevCtx: Context): Unit = + def sameTermOrType(d1: SingleDenotation, d2: Denotation) = + d2.containsSym(d1.symbol) || d2.hasUniqueSym && { + val sym1 = d1.symbol + val sym2 = d2.symbol + if sym1.isTerm then + sym1.isStableMember && + sym2.isStableMember && + sym1.termRef =:= sym2.termRef + else + (sym1.isAliasType || sym2.isAliasType) && d1.info =:= d2.info + } val outer = last.outer val owner = outer.owner if (owner eq last.owner) && (outer.scope eq last.scope) then @@ -385,7 +393,7 @@ class 
Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val competing = scope.denotsNamed(name).filterWithFlags(required, excluded) if competing.exists then val symsMatch = competing - .filterWithPredicate(sd => denot.containsSym(sd.symbol)) + .filterWithPredicate(sd => sameTermOrType(sd, denot)) .exists if !symsMatch && !suppressErrors then report.errorOrMigrationWarning( @@ -476,13 +484,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * (x: T | Null) => x.$asInstanceOf$[x.type & T] */ def toNotNullTermRef(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match - case ref @ OrNull(tpnn) : TermRef + case ref: TermRef if pt != AssignProto && // Ensure it is not the lhs of Assign ctx.notNullInfos.impliesNotNull(ref) && // If a reference is in the context, it is already trackable at the point we add it. // Hence, we don't use isTracked in the next line, because checking use out of order is enough. !ref.usedOutOfOrder => - tree.cast(AndType(ref, tpnn)) + ref match + case OrNull(tpnn) => tree.cast(AndType(ref, tpnn)) + case _ => tree case _ => tree @@ -525,7 +535,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val found = findRef(name, pt, EmptyFlags, EmptyFlags, tree.srcPos) if foundUnderScala2.exists && !(foundUnderScala2 =:= found) then report.migrationWarning( - ex"""Name resolution will change. + em"""Name resolution will change. 
| currently selected : $foundUnderScala2 | in the future, without -source 3.0-migration: $found""", tree.srcPos) foundUnderScala2 @@ -534,22 +544,40 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer unimported = saved1 foundUnderScala2 = saved2 - def checkNotShadowed(ownType: Type) = ownType match - case ownType: TermRef if ownType.symbol.is(ConstructorProxy) => - val shadowed = findRef(name, pt, EmptyFlags, ConstructorProxy, tree.srcPos) - if shadowed.exists then - report.error( - em"""Reference to constructor proxy for ${ownType.symbol.companionClass.showLocated} - |shadows outer reference to ${shadowed.termSymbol.showLocated}""", tree.srcPos) - case _ => + /** Normally, returns `ownType` except if `ownType` is a constructor proxy, + * and there is another shadowed type accessible with the same name that is not: + * - if the prototype is an application: + * - if the shadowed type has a method alternative or an apply method, + * issue an ambiguity error + * - otherwise again return `ownType` + * - if the prototype is not an application, return the shadowed type + */ + def checkNotShadowed(ownType: Type): Type = + ownType match + case ownType: TermRef if ownType.symbol.is(ConstructorProxy) => + findRef(name, pt, EmptyFlags, ConstructorProxy, tree.srcPos) match + case shadowed: TermRef => + pt match + case pt: FunOrPolyProto => + def err(shadowedIsApply: Boolean) = + report.error(ConstrProxyShadows(ownType, shadowed, shadowedIsApply), tree.srcPos) + if shadowed.denot.hasAltWith(sd => sd.symbol.is(Method, butNot = Accessor)) then + err(shadowedIsApply = false) + else if shadowed.member(nme.apply).hasAltWith(_.symbol.is(Method, butNot = Accessor)) then + err(shadowedIsApply = true) + case _ => + return shadowed + case shadowed => + case _ => + ownType def setType(ownType: Type): Tree = - checkNotShadowed(ownType) - val tree1 = ownType match - case ownType: NamedType if !prefixIsElidable(ownType) => - ref(ownType).withSpan(tree.span) + val 
checkedType = checkNotShadowed(ownType) + val tree1 = checkedType match + case checkedType: NamedType if !prefixIsElidable(checkedType) => + ref(checkedType).withSpan(tree.span) case _ => - tree.withType(ownType) + tree.withType(checkedType) val tree2 = toNotNullTermRef(tree1, pt) checkLegalValue(tree2, pt) tree2 @@ -584,7 +612,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else if ctx.owner.isConstructor && !ctx.owner.isPrimaryConstructor && ctx.owner.owner.unforcedDecls.lookup(tree.name).exists then // we are in the arguments of a this(...) constructor call - errorTree(tree, ex"$tree is not accessible from constructor arguments") + errorTree(tree, em"$tree is not accessible from constructor arguments") else errorTree(tree, MissingIdent(tree, kind, name)) end typedIdent @@ -609,11 +637,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val superAccess = qual.isInstanceOf[Super] val rawType = selectionType(tree, qual) val checkedType = accessibleType(rawType, superAccess) - if checkedType.exists then + + def finish(tree: untpd.Select, qual: Tree, checkedType: Type): Tree = val select = toNotNullTermRef(assignType(tree, checkedType), pt) if selName.isTypeName then checkStable(qual.tpe, qual.srcPos, "type prefix") checkLegalValue(select, pt) ConstFold(select) + + if checkedType.exists then + finish(tree, qual, checkedType) else if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then // Simplify `m.apply(...)` to `m(...)` qual @@ -622,9 +654,35 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, // but that is done only after we search for extension methods or conversions. 
typedSelect(tree, pt, qual) + else if defn.isSmallGenericTuple(qual.tpe) then + val elems = defn.tupleTypes(qual.tpe.widenTermRefExpr).getOrElse(Nil) + typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) else val tree1 = tryExtensionOrConversion( tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) + .orElse { + if ctx.gadt.isNarrowing then + // try GADT approximation if we're trying to select a member + // Member lookup cannot take GADTs into account b/c of cache, so we + // approximate types based on GADT constraints instead. For an example, + // see MemberHealing in gadt-approximation-interaction.scala. + val wtp = qual.tpe.widen + gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") + val gadtApprox = Inferencing.approximateGADT(wtp) + gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") + val qual1 = qual.cast(gadtApprox) + val tree1 = cpy.Select(tree0)(qual1, selName) + val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) + if checkedType1.exists then + gadts.println(i"Member selection healed by GADT approximation") + finish(tree1, qual1, checkedType1) + else if defn.isSmallGenericTuple(qual1.tpe) then + gadts.println(i"Tuple member selection healed by GADT approximation") + typedSelect(tree, pt, qual1) + else + tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) + else EmptyTree + } if !tree1.isEmpty then tree1 else if canDefineFurther(qual.tpe.widen) then @@ -673,7 +731,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer javaSelectOnType(qual2) case _ => - errorTree(tree, "cannot convert to type selection") // will never be printed due to fallback + errorTree(tree, em"cannot convert to type selection") // will never be printed due to fallback } def selectWithFallback(fallBack: Context ?=> Tree) = @@ -804,14 +862,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer 
isSkolemFree(pt) && isEligible(pt.underlyingClassRef(refinementOK = false))) templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) - templ1.parents foreach { - case parent: RefTree => - typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) - case _ => - } - val x = tpnme.ANON_CLASS - val clsDef = TypeDef(x, templ1).withFlags(Final | Synthetic) - typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt) + for case parent: RefTree <- templ1.parents do + typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) + val anon = tpnme.ANON_CLASS + val clsDef = TypeDef(anon, templ1).withFlags(Final | Synthetic) + typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(anon), Nil)), pt) case _ => var tpt1 = typedType(tree.tpt) val tsym = tpt1.tpe.underlyingClassRef(refinementOK = false).typeSymbol @@ -987,8 +1042,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def lhs1 = adapt(lhsCore, AssignProto, locked) def reassignmentToVal = - errorTree(cpy.Assign(tree)(lhsCore, typed(tree.rhs, lhs1.tpe.widen)), - ReassignmentToVal(lhsCore.symbol.name)) + report.error(ReassignmentToVal(lhsCore.symbol.name), tree.srcPos) + cpy.Assign(tree)(lhsCore, typed(tree.rhs, lhs1.tpe.widen)).withType(defn.UnitType) def canAssign(sym: Symbol) = sym.is(Mutable, butNot = Accessor) || @@ -1066,6 +1121,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val (stats1, exprCtx) = withoutMode(Mode.Pattern) { typedBlockStats(tree.stats) } + var expr1 = typedExpr(tree.expr, pt.dropIfProto)(using exprCtx) // If unsafe nulls is enabled inside a block but not enabled outside @@ -1184,8 +1240,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ) end typedIf - /** Decompose function prototype into a list of parameter prototypes and a result prototype - * tree, using WildcardTypes where a type is not known. 
+ /** Decompose function prototype into a list of parameter prototypes and a result + * prototype tree, using WildcardTypes where a type is not known. + * Note: parameter prototypes may be TypeBounds. * For the result type we do this even if the expected type is not fully * defined, which is a bit of a hack. But it's needed to make the following work * (see typers.scala and printers/PlainPrinter.scala for examples). @@ -1210,8 +1267,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && defn.isContextFunctionType(pt1.nonPrivateMember(nme.apply).info.finalResultType) then report.error( - i"""Implementation restriction: Expected result type $pt1 - |is a curried dependent context function type. Such types are not yet supported.""", + em"""Implementation restriction: Expected result type $pt1 + |is a curried dependent context function type. Such types are not yet supported.""", pos) pt1 match { case tp: TypeParamRef => @@ -1221,9 +1278,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // if expected parameter type(s) are wildcards, approximate from below. // if expected result type is a wildcard, approximate from above. // this can type the greatest set of admissible closures. - (pt1.argTypesLo.init, typeTree(interpolateWildcards(pt1.argTypesHi.last))) + + (pt1.argInfos.init, typeTree(interpolateWildcards(pt1.argInfos.last.hiBound))) case RefinedType(parent, nme.apply, mt @ MethodTpe(_, formals, restpe)) - if defn.isNonRefinedFunction(parent) && formals.length == defaultArity => + if (defn.isNonRefinedFunction(parent) || defn.isErasedFunctionType(parent)) && formals.length == defaultArity => (formals, untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef)))) case SAMType(mt @ MethodTpe(_, formals, restpe)) => (formals, @@ -1254,20 +1312,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * If both attempts fail, return `NoType`. 
*/ def inferredFromTarget( - param: untpd.ValDef, formal: Type, calleeType: Type, paramIndex: Name => Int)(using Context): Type = + param: untpd.ValDef, formal: Type, calleeType: Type, isErased: Boolean, paramIndex: Name => Int)(using Context): Type = val target = calleeType.widen match case mtpe: MethodType => val pos = paramIndex(param.name) if pos < mtpe.paramInfos.length then - mtpe.paramInfos(pos) + val tp = mtpe.paramInfos(pos) // This works only if vararg annotations match up. // See neg/i14367.scala for an example where the inferred type is mispredicted. // Nevertheless, the alternative would be to give up completely, so this is // defensible. + // Strip inferred erased annotation, to avoid accidentally inferring erasedness + if !isErased then tp.stripAnnots(_.symbol != defn.ErasedParamAnnot) else tp else NoType case _ => NoType if target.exists then formal <:< target - if isFullyDefined(formal, ForceDegree.flipBottom) then formal + if !formal.isExactlyNothing && isFullyDefined(formal, ForceDegree.flipBottom) then formal else if target.exists && isFullyDefined(target, ForceDegree.flipBottom) then target else NoType @@ -1277,32 +1337,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedFunctionType(tree: untpd.Function, pt: Type)(using Context): Tree = { val untpd.Function(args, body) = tree - var funFlags = tree match { - case tree: untpd.FunctionWithMods => tree.mods.flags - case _ => EmptyFlags + var (funFlags, erasedParams) = tree match { + case tree: untpd.FunctionWithMods => (tree.mods.flags, tree.erasedParams) + case _ => (EmptyFlags, args.map(_ => false)) } - assert(!funFlags.is(Erased) || !args.isEmpty, "An empty function cannot not be erased") - val numArgs = args.length val isContextual = funFlags.is(Given) - val isErased = funFlags.is(Erased) val isImpure = funFlags.is(Impure) - val funSym = defn.FunctionSymbol(numArgs, isContextual, isErased, isImpure) - - /** If `app` is a function type with arguments that are all 
erased classes, - * turn it into an erased function type. - */ - def propagateErased(app: Tree): Tree = app match - case AppliedTypeTree(tycon: TypeTree, args) - if !isErased - && numArgs > 0 - && args.indexWhere(!_.tpe.isErasedClass) == numArgs => - val tycon1 = TypeTree(defn.FunctionSymbol(numArgs, isContextual, true, isImpure).typeRef) - .withSpan(tycon.span) - assignType(cpy.AppliedTypeTree(app)(tycon1, args), tycon1, args) - case _ => - app /** Typechecks dependent function type with given parameters `params` */ def typedDependent(params: List[untpd.ValDef])(using Context): Tree = @@ -1317,16 +1359,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if funFlags.is(Given) then params.map(_.withAddedFlags(Given)) else params val params2 = params1.map(fixThis.transformSub) - val appDef0 = untpd.DefDef(nme.apply, List(params2), body, EmptyTree).withSpan(tree.span) + val params3 = params2.zipWithConserve(erasedParams) { (arg, isErased) => + if isErased then arg.withAddedFlags(Erased) else arg + } + val appDef0 = untpd.DefDef(nme.apply, List(params3), body, EmptyTree).withSpan(tree.span) index(appDef0 :: Nil) val appDef = typed(appDef0).asInstanceOf[DefDef] val mt = appDef.symbol.info.asInstanceOf[MethodType] if (mt.isParamDependent) - report.error(i"$mt is an illegal function type because it has inter-parameter dependencies", tree.srcPos) + report.error(em"$mt is an illegal function type because it has inter-parameter dependencies", tree.srcPos) + // Restart typechecking if there are erased classes that we want to mark erased + if mt.erasedParams.zip(mt.paramInfos.map(_.isErasedClass)).exists((paramErased, classErased) => classErased && !paramErased) then + val newParams = params3.zipWithConserve(mt.paramInfos.map(_.isErasedClass)) { (arg, isErasedClass) => + if isErasedClass then arg.withAddedFlags(Erased) else arg + } + return typedDependent(newParams) val resTpt = TypeTree(mt.nonDependentResultApprox).withSpan(body.span) val typeArgs = 
appDef.termParamss.head.map(_.tpt) :+ resTpt - val tycon = TypeTree(funSym.typeRef) - val core = propagateErased(AppliedTypeTree(tycon, typeArgs)) + val core = + if mt.hasErasedParams then TypeTree(defn.ErasedFunctionClass.typeRef) + else + val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) + val tycon = TypeTree(funSym.typeRef) + AppliedTypeTree(tycon, typeArgs) RefinedTypeTree(core, List(appDef), ctx.owner.asClass) end typedDependent @@ -1335,17 +1390,25 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedDependent(args.asInstanceOf[List[untpd.ValDef]])( using ctx.fresh.setOwner(newRefinedClassSymbol(tree.span)).setNewScope) case _ => - propagateErased( - typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt)) + if erasedParams.contains(true) then + typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) + else + val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) + val result = typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt) + // if there are any erased classes, we need to re-do the typecheck. + result match + case r: AppliedTypeTree if r.args.exists(_.tpe.isErasedClass) => + typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) + case _ => result } } def typedFunctionValue(tree: untpd.Function, pt: Type)(using Context): Tree = { val untpd.Function(params: List[untpd.ValDef] @unchecked, _) = tree: @unchecked - val isContextual = tree match { - case tree: untpd.FunctionWithMods => tree.mods.is(Given) - case _ => false + val (isContextual, isDefinedErased) = tree match { + case tree: untpd.FunctionWithMods => (tree.mods.is(Given), tree.erasedParams) + case _ => (false, tree.args.map(_ => false)) } /** The function body to be returned in the closure. 
Can become a TypedSplice @@ -1446,9 +1509,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val (protoFormals, resultTpt) = decomposeProtoFunction(pt, params.length, tree.srcPos) - def protoFormal(i: Int): Type = - if (protoFormals.length == params.length) protoFormals(i) - else errorType(WrongNumberOfParameters(protoFormals.length), tree.srcPos) + /** Returns the type and whether the parameter is erased */ + def protoFormal(i: Int): (Type, Boolean) = + if (protoFormals.length == params.length) (protoFormals(i), isDefinedErased(i)) + else (errorType(WrongNumberOfParameters(protoFormals.length), tree.srcPos), false) /** Is `formal` a product type which is elementwise compatible with `params`? */ def ptIsCorrectProduct(formal: Type) = @@ -1460,11 +1524,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } var desugared: untpd.Tree = EmptyTree - if protoFormals.length == 1 && params.length != 1 && ptIsCorrectProduct(protoFormals.head) then - val isGenericTuple = - protoFormals.head.derivesFrom(defn.TupleClass) - && !defn.isTupleClass(protoFormals.head.typeSymbol) - desugared = desugar.makeTupledFunction(params, fnBody, isGenericTuple) + if protoFormals.length == 1 && params.length != 1 then + val firstFormal = protoFormals.head.loBound + if ptIsCorrectProduct(firstFormal) then + val isGenericTuple = + firstFormal.derivesFrom(defn.TupleClass) + && !defn.isTupleClass(firstFormal.typeSymbol) + desugared = desugar.makeTupledFunction(params, fnBody, isGenericTuple) else if protoFormals.length > 1 && params.length == 1 then def isParamRef(scrut: untpd.Tree): Boolean = scrut match case untpd.Annotated(scrut1, _) => isParamRef(scrut1) @@ -1484,20 +1550,32 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if desugared.isEmpty then val inferredParams: List[untpd.ValDef] = for ((param, i) <- params.zipWithIndex) yield - if (!param.tpt.isEmpty) param - else - val formal = protoFormal(i) - val knownFormal = 
isFullyDefined(formal, ForceDegree.failBottom) - val paramType = - if knownFormal then formal - else inferredFromTarget(param, formal, calleeType, paramIndex) - .orElse(errorType(AnonymousFunctionMissingParamType(param, tree, formal), param.srcPos)) - val paramTpt = untpd.TypedSplice( - (if knownFormal then InferredTypeTree() else untpd.TypeTree()) - .withType(paramType.translateFromRepeated(toArray = false)) - .withSpan(param.span.endPos) - ) - cpy.ValDef(param)(tpt = paramTpt) + val (formalBounds, isErased) = protoFormal(i) + val param0 = + if (!param.tpt.isEmpty) param + else + val formal = formalBounds.loBound + val isBottomFromWildcard = (formalBounds ne formal) && formal.isExactlyNothing + val knownFormal = isFullyDefined(formal, ForceDegree.failBottom) + // If the expected formal is a TypeBounds wildcard argument with Nothing as lower bound, + // try to prioritize inferring from target. See issue 16405 (tests/run/16405.scala) + val paramType = + // Strip inferred erased annotation, to avoid accidentally inferring erasedness + val formal0 = if !isErased then formal.stripAnnots(_.symbol != defn.ErasedParamAnnot) else formal + if knownFormal && !isBottomFromWildcard then + formal0 + else + inferredFromTarget(param, formal, calleeType, isErased, paramIndex).orElse( + if knownFormal then formal0 + else errorType(AnonymousFunctionMissingParamType(param, tree, formal), param.srcPos) + ) + val paramTpt = untpd.TypedSplice( + (if knownFormal then InferredTypeTree() else untpd.TypeTree()) + .withType(paramType.translateFromRepeated(toArray = false)) + .withSpan(param.span.endPos) + ) + cpy.ValDef(param)(tpt = paramTpt) + if isErased then param0.withAddedFlags(Flags.Erased) else param0 desugared = desugar.makeClosure(inferredParams, fnBody, resultTpt, isContextual, tree.span) typed(desugared, pt) @@ -1526,17 +1604,21 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Replace the underspecified expected type by one based on the closure method type 
defn.PartialFunctionOf(mt.firstParamTypes.head, mt.resultType) else - report.error(ex"result type of lambda is an underspecified SAM type $pt", tree.srcPos) + report.error(em"result type of lambda is an underspecified SAM type $pt", tree.srcPos) pt TypeTree(targetTpe) case _ => if (mt.isParamDependent) errorTree(tree, - i"""cannot turn method type $mt into closure - |because it has internal parameter dependencies""") + em"""cannot turn method type $mt into closure + |because it has internal parameter dependencies""") else if ((tree.tpt `eq` untpd.ContextualEmptyTree) && mt.paramNames.isEmpty) // Note implicitness of function in target type since there are no method parameters that indicate it. - TypeTree(defn.FunctionOf(Nil, mt.resType, isContextual = true, isErased = false)) + TypeTree(defn.FunctionOf(Nil, mt.resType, isContextual = true)) + else if hasCaptureConversionArg(mt.resType) then + errorTree(tree, + em"""cannot turn method type $mt into closure + |because it has capture conversion skolem types""") else EmptyTree } @@ -1565,9 +1647,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } else { val (protoFormals, _) = decomposeProtoFunction(pt, 1, tree.srcPos) - val checkMode = - if (pt.isRef(defn.PartialFunctionClass)) desugar.MatchCheck.None - else desugar.MatchCheck.Exhaustive + val checkMode = desugar.MatchCheck.Exhaustive typed(desugar.makeCaseLambda(tree.cases, checkMode, protoFormals.length).withSpan(tree.span), pt) } case _ => @@ -1609,6 +1689,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } val result = pt match { + case mt: MatchType if isMatchTypeShaped(mt) => + typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case MatchType.InDisguise(mt) if isMatchTypeShaped(mt) => typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case _ => @@ -1648,7 +1730,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // skip exhaustivity check in later phase // TODO: move the check 
above to patternMatcher phase - val uncheckedTpe = AnnotatedType(sel.tpe.widen, Annotation(defn.UncheckedAnnot)) + val uncheckedTpe = AnnotatedType(sel.tpe.widen, Annotation(defn.UncheckedAnnot, tree.selector.span)) tpd.cpy.Match(result)( selector = tpd.Typed(sel, tpd.TypeTree(uncheckedTpe)), cases = result.cases @@ -1779,7 +1861,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var body1 = typedType(cdef.body, pt) if !body1.isType then assert(ctx.reporter.errorsReported) - body1 = TypeTree(errorType("", cdef.srcPos)) + body1 = TypeTree(errorType(em"", cdef.srcPos)) assignType(cpy.CaseDef(cdef)(pat2, EmptyTree, body1), pat2, body1) } caseRest(using ctx.fresh.setFreshGADTBounds.setNewScope) @@ -1877,13 +1959,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val expr1 = typed(tree.expr, defn.ThrowableType) val cap = checkCanThrow(expr1.tpe.widen, tree.span) val res = Throw(expr1).withSpan(tree.span) - if ctx.settings.Ycc.value && !cap.isEmpty && !ctx.isAfterTyper then + if Feature.ccEnabled && !cap.isEmpty && !ctx.isAfterTyper then // Record access to the CanThrow capabulity recovered in `cap` by wrapping // the type of the `throw` (i.e. Nothing) in a `@requiresCapability` annotatoon. 
Typed(res, TypeTree( AnnotatedType(res.tpe, - Annotation(defn.RequiresCapabilityAnnot, cap)))) + Annotation(defn.RequiresCapabilityAnnot, cap, tree.span)))) else res def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(using Context): SeqLiteral = { @@ -1927,7 +2009,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer .withType( if isFullyDefined(pt, ForceDegree.flipBottom) then pt else if ctx.reporter.errorsReported then UnspecifiedErrorType - else errorType(i"cannot infer type; expected type $pt is not fully defined", tree.srcPos)) + else errorType(em"cannot infer type; expected type $pt is not fully defined", tree.srcPos)) def typedTypeTree(tree: untpd.TypeTree, pt: Type)(using Context): Tree = tree match @@ -1941,13 +2023,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // untyped tree is no longer accessed after all // accesses with typedTypeTree are done. case None => - errorTree(tree, "Something's wrong: missing original symbol for type tree") + errorTree(tree, em"Something's wrong: missing original symbol for type tree") } case _ => completeTypeTree(InferredTypeTree(), pt, tree) def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { - val ref1 = typedExpr(tree.ref) + val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") assignType(cpy.SingletonTypeTree(tree)(ref1), ref1) } @@ -1982,9 +2064,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.args match case arg :: _ if arg.isTerm => if Feature.dependentEnabled then - return errorTree(tree, i"Not yet implemented: T(...)") + return errorTree(tree, em"Not yet implemented: T(...)") else - return errorTree(tree, dependentStr) + return errorTree(tree, dependentMsg) case _ => val tpt1 = withoutMode(Mode.Pattern) { @@ -2094,6 +2176,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && checkedArgs(1).tpe.derivesFrom(defn.RuntimeExceptionClass) 
then report.error(em"throws clause cannot be defined for RuntimeException", checkedArgs(1).srcPos) + else if tycon == defn.IntoType then + // is defined in package scala but this should be hidden from user programs + report.error(em"not found: ", tpt1.srcPos) else if (ctx.isJava) if tycon eq defn.ArrayClass then checkedArgs match { @@ -2120,9 +2205,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedTermLambdaTypeTree(tree: untpd.TermLambdaTypeTree)(using Context): Tree = if Feature.dependentEnabled then - errorTree(tree, i"Not yet implemented: (...) =>> ...") + errorTree(tree, em"Not yet implemented: (...) =>> ...") else - errorTree(tree, dependentStr) + errorTree(tree, dependentMsg) def typedMatchTypeTree(tree: untpd.MatchTypeTree, pt: Type)(using Context): Tree = { val bound1 = @@ -2146,15 +2231,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val alias1 = typed(alias) val lo2 = if (lo1.isEmpty) typed(untpd.TypeTree(defn.NothingType)) else lo1 val hi2 = if (hi1.isEmpty) typed(untpd.TypeTree(defn.AnyType)) else hi1 - if !alias1.isEmpty then - val bounds = TypeBounds(lo2.tpe, hi2.tpe) - if !bounds.contains(alias1.tpe) then - report.error(em"type ${alias1.tpe} outside bounds $bounds", tree.srcPos) assignType(cpy.TypeBoundsTree(tree)(lo2, hi2, alias1), lo2, hi2, alias1) def typedBind(tree: untpd.Bind, pt: Type)(using Context): Tree = { if !isFullyDefined(pt, ForceDegree.all) then - return errorTree(tree, i"expected type of $tree is not fully defined") + return errorTree(tree, em"expected type of $tree is not fully defined") val body1 = typed(tree.body, pt) body1 match { case UnApply(fn, Nil, arg :: Nil) @@ -2220,29 +2301,23 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** The context to be used for an annotation of `mdef`. * This should be the context enclosing `mdef`, or if `mdef` defines a parameter * the context enclosing the owner of `mdef`. 
- * Furthermore, we need to evaluate annotation arguments in an expression context, - * since classes defined in a such arguments should not be entered into the - * enclosing class. + * Furthermore, we need to make sure that annotation trees are evaluated + * with an owner that is not the enclosing class since otherwise locally + * defined symbols would be entered as class members. */ - def annotContext(mdef: untpd.Tree, sym: Symbol)(using Context): Context = { + def annotContext(mdef: untpd.Tree, sym: Symbol)(using Context): Context = def isInner(owner: Symbol) = owner == sym || sym.is(Param) && owner == sym.owner val outer = ctx.outersIterator.dropWhile(c => isInner(c.owner)).next() - var adjusted = outer.property(ExprOwner) match { - case Some(exprOwner) if outer.owner.isClass => outer.exprContext(mdef, exprOwner) - case _ => outer - } + def local: FreshContext = outer.fresh.setOwner(newLocalDummy(sym.owner)) sym.owner.infoOrCompleter match - case completer: Namer#Completer if sym.is(Param) => - val tparams = completer.completerTypeParams(sym) - if tparams.nonEmpty then - // Create a new local context with a dummy owner and a scope containing the - // type parameters of the enclosing method or class. Thus annotations can see - // these type parameters. See i12953.scala for a test case. - val dummyOwner = newLocalDummy(sym.owner) - adjusted = adjusted.fresh.setOwner(dummyOwner).setScope(newScopeWith(tparams*)) + case completer: Namer#Completer + if sym.is(Param) && completer.completerTypeParams(sym).nonEmpty => + // Create a new local context with a dummy owner and a scope containing the + // type parameters of the enclosing method or class. Thus annotations can see + // these type parameters. See i12953.scala for a test case. 
+ local.setScope(newScopeWith(completer.completerTypeParams(sym)*)) case _ => - adjusted - } + if outer.owner.isClass then local else outer def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(using Context): Unit = { // necessary to force annotation trees to be computed. @@ -2257,7 +2332,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedAnnotation(annot: untpd.Tree)(using Context): Tree = - checkAnnotArgs(typed(annot, defn.AnnotationClass.typeRef)) + checkAnnotClass(checkAnnotArgs(typed(annot))) def registerNowarn(tree: Tree, mdef: untpd.Tree)(using Context): Unit = val annot = Annotations.Annotation(tree) @@ -2334,7 +2409,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ctx.outer.outersIterator.takeWhile(!_.owner.is(Method)) .filter(ctx => ctx.owner.isClass && ctx.owner.typeParams.nonEmpty) .toList.reverse - .foreach(ctx => rhsCtx.gadt.addToConstraint(ctx.owner.typeParams)) + .foreach(ctx => rhsCtx.gadtState.addToConstraint(ctx.owner.typeParams)) if tparamss.nonEmpty then rhsCtx.setFreshGADTBounds @@ -2343,7 +2418,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // we're typing a polymorphic definition's body, // so we allow constraining all of its type parameters // constructors are an exception as we don't allow constraining type params of classes - rhsCtx.gadt.addToConstraint(tparamSyms) + rhsCtx.gadtState.addToConstraint(tparamSyms) else if !sym.isPrimaryConstructor then linkConstructorParams(sym, tparamSyms, rhsCtx) @@ -2354,8 +2429,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then - if StagingContext.level > 0 then + if StagingLevel.level > 0 then report.error("inline def cannot be within quotes", sym.sourcePos) + if sym.is(Given) + && untpd.stripBlock(untpd.unsplice(ddef.rhs)).isInstanceOf[untpd.Function] + then + report.warning(InlineGivenShouldNotBeFunction(), 
ddef.rhs.srcPos) val rhsToInline = PrepareInlineable.wrapRHS(ddef, tpt1, rhs1) PrepareInlineable.registerInlineInfo(sym, rhsToInline) @@ -2442,7 +2521,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // error if the same parent was explicitly added in user code. if !tree.span.isSourceDerived then return EmptyTree - if !ctx.isAfterTyper then report.error(i"$psym is extended twice", tree.srcPos) + if !ctx.isAfterTyper then report.error(em"$psym is extended twice", tree.srcPos) else seenParents += psym val result = ensureConstrCall(cls, parent, psym)(using superCtx) @@ -2451,6 +2530,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkSimpleKinded(parent) // allow missing type parameters if there are implicit arguments to pass // since we can infer type arguments from them + val constr = psym.primaryConstructor + if psym.is(Trait) && constr.exists && !cls.isRefinementClass then + ensureAccessible(constr.termRef, superAccess = true, tree.srcPos) else checkParentCall(result, cls) if cls is Case then @@ -2553,13 +2635,26 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // check value class constraints checkDerivedValueClass(cls, body1) + // check PolyFunction constraints (no erased functions!) 
+ if parents1.exists(_.tpe.classSymbol eq defn.PolyFunctionClass) then + body1.foreach { + case ddef: DefDef => + ddef.paramss.foreach { params => + val erasedParam = params.collectFirst { case vdef: ValDef if vdef.symbol.is(Erased) => vdef } + erasedParam.foreach { p => + report.error(em"Implementation restriction: erased classes are not allowed in a poly function definition", p.srcPos) + } + } + case _ => + } + val effectiveOwner = cls.owner.skipWeakOwner if !cls.isRefinementClass && !cls.isAllOf(PrivateLocal) && effectiveOwner.is(Trait) && !effectiveOwner.derivesFrom(defn.ObjectClass) then - report.error(i"$cls cannot be defined in universal $effectiveOwner", cdef.srcPos) + report.error(em"$cls cannot be defined in universal $effectiveOwner", cdef.srcPos) // Temporarily set the typed class def as root tree so that we have at least some // information in the IDE in case we never reach `SetRootTree`. @@ -2593,6 +2688,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer */ def ensureConstrCall(cls: ClassSymbol, parent: Tree, psym: Symbol)(using Context): Tree = if parent.isType && !cls.is(Trait) && !cls.is(JavaDefined) && psym.isClass + // Annotations are represented as traits with constructors, but should + // never be called as such outside of annotation trees. 
+ && !psym.is(JavaAnnotation) && (!psym.is(Trait) || psym.primaryConstructor.info.takesParams && !cls.superClass.isSubClass(psym)) then typed(untpd.New(untpd.TypedSplice(parent), Nil)) @@ -2663,17 +2761,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Package will not exist if a duplicate type has already been entered, see `tests/neg/1708.scala` errorTree(tree, if pkg.exists then PackageNameAlreadyDefined(pkg) - else i"package ${tree.pid.name} does not exist") + else em"package ${tree.pid.name} does not exist") end typedPackageDef def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree = { - val annot1 = typedExpr(tree.annot, defn.AnnotationClass.typeRef) - if Annotations.annotClass(annot1) == defn.NowarnAnnot then + val annot1 = checkAnnotClass(typedExpr(tree.annot)) + val annotCls = Annotations.annotClass(annot1) + if annotCls == defn.NowarnAnnot then registerNowarn(annot1, tree) val arg1 = typed(tree.arg, pt) if (ctx.mode is Mode.Type) { val cls = annot1.symbol.maybeOwner - if cls == defn.RetainsAnnot || cls == defn.RetainsByNameAnnot then + if Feature.ccEnabled + && (cls == defn.RetainsAnnot || cls == defn.RetainsByNameAnnot) + then CheckCaptures.checkWellformed(annot1) if arg1.isType then assignType(cpy.Annotated(tree)(arg1, annot1), arg1, annot1) @@ -2740,8 +2841,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if ((prefix ++ suffix).isEmpty) "simply leave out the trailing ` _`" else s"use `$prefix$suffix` instead" report.errorOrMigrationWarning( - i"""The syntax ` _` is no longer supported; - |you can $remedy""", + em"""The syntax ` _` is no longer supported; + |you can $remedy""", tree.srcPos, from = future) if sourceVersion.isMigrating then @@ -2813,7 +2914,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tupleXXLobj = untpd.ref(defn.TupleXXLModule.termRef) val app = untpd.cpy.Apply(tree)(tupleXXLobj, elems.map(untpd.TypedSplice(_))) .withSpan(tree.span) - val app1 = 
typed(app, defn.TupleXXLClass.typeRef) + val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) if (ctx.mode.is(Mode.Pattern)) app1 else { val elemTpes = elems.lazyZip(pts).map((elem, pt) => @@ -2873,7 +2974,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else "" val namePos = tree.sourcePos.withSpan(tree.nameSpan) report.errorOrMigrationWarning( - s"`?` is not a valid type name$addendum", namePos, from = `3.0`) + em"`?` is not a valid type name$addendum", namePos, from = `3.0`) if tree.isClassDef then typedClassDef(tree, sym.asClass)(using ctx.localContext(tree, sym)) else @@ -2925,7 +3026,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.TypedSplice => typedTypedSplice(tree) case tree: untpd.UnApply => typedUnApply(tree, pt) case tree: untpd.Tuple => typedTuple(tree, pt) - case tree: untpd.DependentTypeTree => completeTypeTree(untpd.TypeTree(), pt, tree) + case tree: untpd.DependentTypeTree => completeTypeTree(untpd.InferredTypeTree(), pt, tree) case tree: untpd.InfixOp => typedInfixOp(tree, pt) case tree: untpd.ParsedTry => typedTry(tree, pt) case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt) @@ -2973,7 +3074,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = { - val defn.FunctionOf(formals, _, true, _) = pt.dropDependentRefinement: @unchecked + val defn.FunctionOf(formals, _, true) = pt.dropDependentRefinement: @unchecked // The getter of default parameters may reach here. 
// Given the code below @@ -3001,7 +3102,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else formals.map(untpd.TypeTree) } - val ifun = desugar.makeContextualFunction(paramTypes, tree, defn.isErasedFunctionType(pt)) + val erasedParams = pt.dealias match { + case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams + case _ => paramTypes.map(_ => false) + } + + val ifun = desugar.makeContextualFunction(paramTypes, tree, erasedParams) typr.println(i"make contextual function $tree / $pt ---> $ifun") typedFunctionValue(ifun, pt) } @@ -3071,16 +3177,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer traverse(xtree :: rest) case stat :: rest => val stat1 = typed(stat)(using ctx.exprContext(stat, exprOwner)) - checkStatementPurity(stat1)(stat, exprOwner) + if !checkInterestingResultInStatement(stat1) then checkStatementPurity(stat1)(stat, exprOwner) buf += stat1 traverse(rest)(using stat1.nullableContext) case nil => (buf.toList, ctx) } - val localCtx = { - val exprOwnerOpt = if (exprOwner == ctx.owner) None else Some(exprOwner) - ctx.withProperty(ExprOwner, exprOwnerOpt) - } def finalize(stat: Tree)(using Context): Tree = stat match { case stat: TypeDef if stat.symbol.is(Module) => val enumContext = enumContexts(stat.symbol.linkedClass) @@ -3093,7 +3195,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => stat } - val (stats0, finalCtx) = traverse(stats)(using localCtx) + val (stats0, finalCtx) = traverse(stats) val stats1 = stats0.mapConserve(finalize) if ctx.owner == exprOwner then checkNoTargetNameConflict(stats1) (stats1, finalCtx) @@ -3342,7 +3444,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case SearchSuccess(found, _, _, isExtension) => if isExtension then return found else - checkImplicitConversionUseOK(found) + checkImplicitConversionUseOK(found, selProto) return withoutMode(Mode.ImplicitsEnabled)(typedSelect(tree, pt, found)) case failure: 
SearchFailure => if failure.isAmbiguous then @@ -3416,42 +3518,59 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ErrorReporting.missingArgs(tree, mt) tree.withType(mt.resultType) - def adaptOverloaded(ref: TermRef) = { + def adaptOverloaded(ref: TermRef) = + // get all the alternatives val altDenots = val allDenots = ref.denot.alternatives if pt.isExtensionApplyProto then allDenots.filter(_.symbol.is(ExtensionMethod)) else allDenots + typr.println(i"adapt overloaded $ref with alternatives ${altDenots map (_.info)}%\n\n %") + + /** Search for an alternative that does not take parameters. + * If there is one, return it, otherwise emit an error. + */ + def tryParameterless(alts: List[TermRef])(error: => tpd.Tree): Tree = + alts.filter(_.info.isParameterless) match + case alt :: Nil => readaptSimplified(tree.withType(alt)) + case _ => + if altDenots.exists(_.info.paramInfoss == ListOfNil) then + typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt, locked) + else + error + def altRef(alt: SingleDenotation) = TermRef(ref.prefix, ref.name, alt) val alts = altDenots.map(altRef) - resolveOverloaded(alts, pt) match { + + resolveOverloaded(alts, pt) match case alt :: Nil => readaptSimplified(tree.withType(alt)) case Nil => - // If alternative matches, there are still two ways to recover: + // If no alternative matches, there are still two ways to recover: // 1. If context is an application, try to insert an apply or implicit // 2. If context is not an application, pick a alternative that does // not take parameters. 
- def noMatches = - errorTree(tree, NoMatchingOverload(altDenots, pt)) - def hasEmptyParams(denot: SingleDenotation) = denot.info.paramInfoss == ListOfNil - pt match { + + def errorNoMatch = errorTree(tree, NoMatchingOverload(altDenots, pt)) + + pt match case pt: FunOrPolyProto if pt.applyKind != ApplyKind.Using => // insert apply or convert qualifier, but only for a regular application - tryInsertApplyOrImplicit(tree, pt, locked)(noMatches) + tryInsertApplyOrImplicit(tree, pt, locked)(errorNoMatch) case _ => - alts.filter(_.info.isParameterless) match { - case alt :: Nil => readaptSimplified(tree.withType(alt)) - case _ => - if (altDenots exists (_.info.paramInfoss == ListOfNil)) - typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt, locked) - else - noMatches - } - } + tryParameterless(alts)(errorNoMatch) + case ambiAlts => - if tree.tpe.isErroneous || pt.isErroneous then tree.withType(UnspecifiedErrorType) - else + // If there are ambiguous alternatives, and: + // 1. the types aren't erroneous + // 2. the expected type is not a function type + // 3. there exist a parameterless alternative + // + // Then, pick the parameterless alternative. + // See tests/pos/i10715-scala and tests/pos/i10715-java. 
+ + /** Constructs an "ambiguous overload" error */ + def errorAmbiguous = val remainingDenots = altDenots.filter(denot => ambiAlts.contains(altRef(denot))) val addendum = if ambiAlts.exists(!_.symbol.exists) then @@ -3460,8 +3579,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer |Note: Overloaded definitions introduced by refinements cannot be resolved""" else "" errorTree(tree, AmbiguousOverload(tree, remainingDenots, pt, addendum)) - } - } + end errorAmbiguous + + if tree.tpe.isErroneous || pt.isErroneous then + tree.withType(UnspecifiedErrorType) + else + pt match + case _: FunProto => + errorAmbiguous + case _ => + tryParameterless(alts)(errorAmbiguous) + + end match + end adaptOverloaded def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match { case wtp: MethodOrPoly => @@ -3698,7 +3828,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (!defn.isFunctionType(pt)) pt match { case SAMType(_) if !pt.classSymbol.hasAnnotation(defn.FunctionalInterfaceAnnot) => - report.warning(ex"${tree.symbol} is eta-expanded even though $pt does not have the @FunctionalInterface annotation.", tree.srcPos) + report.warning(em"${tree.symbol} is eta-expanded even though $pt does not have the @FunctionalInterface annotation.", tree.srcPos) case _ => } simplify(typed(etaExpand(tree, wtp, arity), pt), pt, locked) @@ -3721,24 +3851,24 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer true } - if ((implicitFun || caseCompanion) && - !isApplyProto(pt) && - pt != AssignProto && - !ctx.mode.is(Mode.Pattern) && - !ctx.isAfterTyper && - !ctx.isInlineContext) { + if (implicitFun || caseCompanion) + && !isApplyProto(pt) + && pt != SingletonTypeProto + && pt != AssignProto + && !ctx.mode.is(Mode.Pattern) + && !ctx.isAfterTyper + && !ctx.isInlineContext + then typr.println(i"insert apply on implicit $tree") val sel = untpd.Select(untpd.TypedSplice(tree), nme.apply).withAttachment(InsertedApply, ()) try typed(sel, pt, locked) 
finally sel.removeAttachment(InsertedApply) - } - else if (ctx.mode is Mode.Pattern) { + else if ctx.mode is Mode.Pattern then checkEqualityEvidence(tree, pt) tree - } else val meth = methPart(tree).symbol if meth.isAllOf(DeferredInline) && !Inlines.inInlineMethod then - errorTree(tree, i"Deferred inline ${meth.showLocated} cannot be invoked") + errorTree(tree, em"Deferred inline ${meth.showLocated} cannot be invoked") else if Inlines.needsInlining(tree) then tree.tpe <:< wildApprox(pt) val errorCount = ctx.reporter.errorCount @@ -3758,8 +3888,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } else { report.error( - """Scala 2 macro cannot be used in Dotty. See https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html - |To turn this error into a warning, pass -Xignore-scala2-macros to the compiler""".stripMargin, tree.srcPos.startPos) + em"""Scala 2 macro cannot be used in Dotty. See https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html + |To turn this error into a warning, pass -Xignore-scala2-macros to the compiler""", + tree.srcPos.startPos) tree } else TypeComparer.testSubType(tree.tpe.widenExpr, pt) match @@ -3772,7 +3903,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer adaptToSubType(wtp) case CompareResult.OKwithGADTUsed if pt.isValueType - && !inContext(ctx.fresh.setGadt(EmptyGadtConstraint)) { + && !inContext(ctx.fresh.setGadtState(GadtState(GadtConstraint.empty))) { val res = (tree.tpe.widenExpr frozen_<:< pt) if res then // we overshot; a cast is not needed, after all. 
@@ -3837,7 +3968,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else defn.functionArity(ptNorm) else val nparams = wtp.paramInfos.length - if nparams > 0 || pt.eq(AnyFunctionProto) then nparams + if nparams > 1 + || nparams == 1 && !wtp.isVarArgsMethod + || pt.eq(AnyFunctionProto) + then nparams else -1 // no eta expansion in this case adaptNoArgsUnappliedMethod(wtp, funExpected, arity) case _ => @@ -3877,7 +4011,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return adaptConstant(tree, ConstantType(converted)) case _ => - val captured = captureWildcards(wtp) + val captured = captureWildcardsCompat(wtp, pt) if (captured `ne` wtp) return readapt(tree.cast(captured)) @@ -3887,6 +4021,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // so will take the code path that decides on inlining val tree1 = adapt(tree, WildcardType, locked) checkStatementPurity(tree1)(tree, ctx.owner) + if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.WvalueDiscard.value && !isThisTypeResult(tree)) { + report.warning(ValueDiscarding(tree.tpe), tree.srcPos) + } return tpd.Block(tree1 :: Nil, Literal(Constant(()))) } @@ -3928,26 +4065,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else err.typeMismatch(tree, pt, failure) pt match - case pt: SelectionProto => - if ctx.gadt.isNarrowing then - // try GADT approximation if we're trying to select a member - // Member lookup cannot take GADTs into account b/c of cache, so we - // approximate types based on GADT constraints instead. For an example, - // see MemberHealing in gadt-approximation-interaction.scala. 
- gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") - val gadtApprox = Inferencing.approximateGADT(wtp) - gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") - if pt.isMatchedBy(gadtApprox) then - gadts.println(i"Member selection healed by GADT approximation") - tree.cast(gadtApprox) - else tree - else if tree.tpe.derivesFrom(defn.PairClass) && !defn.isTupleNType(tree.tpe.widenDealias) then - // If this is a generic tuple we need to cast it to make the TupleN/ members accessible. - // This only works for generic tuples of know size up to 22. - defn.tupleTypes(tree.tpe.widenTermRefExpr, Definitions.MaxTupleArity) match - case Some(elems) => tree.cast(defn.tupleType(elems)) - case None => tree - else tree // other adaptations for selections are handled in typedSelect + case _: SelectionProto => + tree // adaptations for selections are handled in typedSelect case _ if ctx.mode.is(Mode.ImplicitsEnabled) && tree.tpe.isValueType => if pt.isRef(defn.AnyValClass, skipRefined = false) || pt.isRef(defn.ObjectClass, skipRefined = false) @@ -3957,7 +4076,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case SearchSuccess(found, _, _, isExtension) => if isExtension then found else - checkImplicitConversionUseOK(found) + checkImplicitConversionUseOK(found, pt) withoutMode(Mode.ImplicitsEnabled)(readapt(found)) case failure: SearchFailure => if (pt.isInstanceOf[ProtoType] && !failure.isAmbiguous) then @@ -4013,6 +4132,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Select(qual)(pre, name.toTypeName) case qual: This if qual.symbol.is(ModuleClass) => cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) + case _ => + errorTree(tree, em"cannot convert from $tree to an instance creation expression") val tycon = tree.tpe.widen.finalResultType.underlyingClassRef(refinementOK = false) typed( untpd.Select( @@ -4143,6 +4264,59 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer 
typedExpr(cmp, defn.BooleanType) case _ => + private def checkInterestingResultInStatement(t: Tree)(using Context): Boolean = { + def isUninterestingSymbol(sym: Symbol): Boolean = + sym == NoSymbol || + sym.isConstructor || + sym.is(Package) || + sym.isPackageObject || + sym == defn.BoxedUnitClass || + sym == defn.AnyClass || + sym == defn.AnyRefAlias || + sym == defn.AnyValClass + def isUninterestingType(tpe: Type): Boolean = + tpe == NoType || + tpe.typeSymbol == defn.UnitClass || + defn.isBottomClass(tpe.typeSymbol) || + tpe =:= defn.UnitType || + tpe.typeSymbol == defn.BoxedUnitClass || + tpe =:= defn.AnyValType || + tpe =:= defn.AnyType || + tpe =:= defn.AnyRefType + def isJavaApplication(t: Tree): Boolean = t match { + case Apply(f, _) => f.symbol.is(JavaDefined) && !defn.ObjectClass.isSubClass(f.symbol.owner) + case _ => false + } + def checkInterestingShapes(t: Tree): Boolean = t match { + case If(_, thenpart, elsepart) => checkInterestingShapes(thenpart) || checkInterestingShapes(elsepart) + case Block(_, res) => checkInterestingShapes(res) + case Match(_, cases) => cases.exists(k => checkInterestingShapes(k.body)) + case _ => checksForInterestingResult(t) + } + def checksForInterestingResult(t: Tree): Boolean = ( + !t.isDef // ignore defs + && !isUninterestingSymbol(t.symbol) // ctors, package, Unit, Any + && !isUninterestingType(t.tpe) // bottom types, Unit, Any + && !isThisTypeResult(t) // buf += x + && !isSuperConstrCall(t) // just a thing + && !isJavaApplication(t) // Java methods are inherently side-effecting + // && !treeInfo.hasExplicitUnit(t) // suppressed by explicit expr: Unit // TODO Should explicit `: Unit` be added as warning suppression? 
+ ) + if ctx.settings.WNonUnitStatement.value && !ctx.isAfterTyper && checkInterestingShapes(t) then + val where = t match { + case Block(_, res) => res + case If(_, thenpart, Literal(Constant(()))) => + thenpart match { + case Block(_, res) => res + case _ => thenpart + } + case _ => t + } + report.warning(UnusedNonUnitValue(where.tpe), t.srcPos) + true + else false + } + private def checkStatementPurity(tree: tpd.Tree)(original: untpd.Tree, exprOwner: Symbol)(using Context): Unit = if !tree.tpe.isErroneous && !ctx.isAfterTyper @@ -4187,11 +4361,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => if !config.Feature.scala2ExperimentalMacroEnabled then report.error( - """Scala 2 macro definition needs to be enabled - |by making the implicit value scala.language.experimental.macros visible. - |This can be achieved by adding the import clause 'import scala.language.experimental.macros' - |or by setting the compiler option -language:experimental.macros. - """.stripMargin, call.srcPos) + em"""Scala 2 macro definition needs to be enabled + |by making the implicit value scala.language.experimental.macros visible. + |This can be achieved by adding the import clause 'import scala.language.experimental.macros' + |or by setting the compiler option -language:experimental.macros. + """, + call.srcPos) call match case call: untpd.Ident => typedIdent(call, defn.AnyType) @@ -4206,7 +4381,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedTypeApply(call2, defn.AnyType) } case _ => - report.error("Invalid Scala 2 macro " + call.show, call.srcPos) + report.error(em"Invalid Scala 2 macro $call", call.srcPos) EmptyTree else typedExpr(call, defn.AnyType) @@ -4236,7 +4411,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // this is needed for -Ycheck. Without the annotation Ycheck will // skolemize the result type which will lead to different types before // and after checking. See i11955.scala. 
- AnnotatedType(conj, Annotation(defn.UncheckedStableAnnot)) + AnnotatedType(conj, Annotation(defn.UncheckedStableAnnot, tree.symbol.span)) else conj else pt gadts.println(i"insert GADT cast from $tree to $target") diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index 53646558cf5c..bcfc9288d862 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -164,11 +164,11 @@ class VarianceChecker(using Context) { i"\n${hl("enum case")} ${towner.name} requires explicit declaration of $tvar to resolve this issue.\n$example" else "" - i"${varianceLabel(tvar.flags)} $tvar occurs in ${varianceLabel(required)} position in type ${sym.info} of $sym$enumAddendum" + em"${varianceLabel(tvar.flags)} $tvar occurs in ${varianceLabel(required)} position in type ${sym.info} of $sym$enumAddendum" if (migrateTo3 && (sym.owner.isConstructor || sym.ownersIterator.exists(_.isAllOf(ProtectedLocal)))) report.migrationWarning( - s"According to new variance rules, this is no longer accepted; need to annotate with @uncheckedVariance:\n$msg", + msg.prepend("According to new variance rules, this is no longer accepted; need to annotate with @uncheckedVariance\n"), pos) // patch(Span(pos.end), " @scala.annotation.unchecked.uncheckedVariance") // Patch is disabled until two TODOs are solved: diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala index 471b68d6247e..cde1a63f5293 100644 --- a/compiler/src/dotty/tools/dotc/util/Chars.scala +++ b/compiler/src/dotty/tools/dotc/util/Chars.scala @@ -1,21 +1,20 @@ package dotty.tools.dotc.util import scala.annotation.switch -import java.lang.{Character => JCharacter} -import java.lang.Character.LETTER_NUMBER -import java.lang.Character.LOWERCASE_LETTER -import java.lang.Character.OTHER_LETTER -import java.lang.Character.TITLECASE_LETTER -import 
java.lang.Character.UPPERCASE_LETTER +import Character.{LETTER_NUMBER, LOWERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, UPPERCASE_LETTER} +import Character.{MATH_SYMBOL, OTHER_SYMBOL} +import Character.{isJavaIdentifierPart, isUnicodeIdentifierStart, isUnicodeIdentifierPart} /** Contains constants and classifier methods for characters */ -object Chars { +object Chars: inline val LF = '\u000A' inline val FF = '\u000C' inline val CR = '\u000D' inline val SU = '\u001A' + type CodePoint = Int + /** Convert a character digit to an Int according to given base, * -1 if no success */ @@ -59,17 +58,21 @@ object Chars { '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' /** Can character start an alphanumeric Scala identifier? */ - def isIdentifierStart(c: Char): Boolean = - (c == '_') || (c == '$') || JCharacter.isUnicodeIdentifierStart(c) + def isIdentifierStart(c: Char): Boolean = (c == '_') || (c == '$') || isUnicodeIdentifierStart(c) + def isIdentifierStart(c: CodePoint): Boolean = (c == '_') || (c == '$') || isUnicodeIdentifierStart(c) /** Can character form part of an alphanumeric Scala identifier? */ - def isIdentifierPart(c: Char): Boolean = - (c == '$') || JCharacter.isUnicodeIdentifierPart(c) + def isIdentifierPart(c: Char): Boolean = (c == '$') || isUnicodeIdentifierPart(c) + def isIdentifierPart(c: CodePoint) = (c == '$') || isUnicodeIdentifierPart(c) /** Is character a math or other symbol in Unicode? */ def isSpecial(c: Char): Boolean = { - val chtp = JCharacter.getType(c) - chtp == JCharacter.MATH_SYMBOL.toInt || chtp == JCharacter.OTHER_SYMBOL.toInt + val chtp = Character.getType(c) + chtp == MATH_SYMBOL.toInt || chtp == OTHER_SYMBOL.toInt + } + def isSpecial(codePoint: CodePoint) = { + val chtp = Character.getType(codePoint) + chtp == MATH_SYMBOL.toInt || chtp == OTHER_SYMBOL.toInt } def isValidJVMChar(c: Char): Boolean = @@ -78,15 +81,26 @@ object Chars { def isValidJVMMethodChar(c: Char): Boolean = !(c == '.' 
|| c == ';' || c =='[' || c == '/' || c == '<' || c == '>') - private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_' - private final val letterGroups = { - import JCharacter._ - Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER) - } - def isScalaLetter(ch: Char): Boolean = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch) + def isScalaLetter(c: Char): Boolean = + Character.getType(c: @switch) match { + case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true + case _ => c == '$' || c == '_' + } + def isScalaLetter(c: CodePoint): Boolean = + Character.getType(c: @switch) match { + case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true + case _ => c == '$' || c == '_' + } /** Can character form part of a Scala operator name? */ - def isOperatorPart(c : Char) : Boolean = (c: @switch) match { + def isOperatorPart(c: Char): Boolean = (c: @switch) match { + case '~' | '!' | '@' | '#' | '%' | + '^' | '*' | '+' | '-' | '<' | + '>' | '?' | ':' | '=' | '&' | + '|' | '/' | '\\' => true + case c => isSpecial(c) + } + def isOperatorPart(c: CodePoint): Boolean = (c: @switch) match { case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | @@ -95,5 +109,4 @@ object Chars { } /** Would the character be encoded by `NameTransformer.encode`? 
*/ - def willBeEncoded(c : Char) : Boolean = !JCharacter.isJavaIdentifierPart(c) -} + def willBeEncoded(c: Char): Boolean = !isJavaIdentifierPart(c) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index fd6518fcc15c..a21a4af37038 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -42,9 +42,10 @@ abstract class GenericHashMap[Key, Value] else 1 << (32 - Integer.numberOfLeadingZeros(n)) /** Remove all elements from this table and set back to initial configuration */ - def clear(): Unit = + def clear(resetToInitial: Boolean): Unit = used = 0 - allocate(roundToPower(initialCapacity)) + if resetToInitial then allocate(roundToPower(initialCapacity)) + else java.util.Arrays.fill(table, null) /** The number of elements in the set */ def size: Int = used diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index a524dd39a594..a6e1532c804f 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -44,11 +44,10 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu else if Integer.bitCount(n) == 1 then n else 1 << (32 - Integer.numberOfLeadingZeros(n)) - /** Remove all elements from this set and set back to initial configuration */ - def clear(): Unit = { + def clear(resetToInitial: Boolean): Unit = used = 0 - allocate(roundToPower(initialCapacity)) - } + if resetToInitial then allocate(roundToPower(initialCapacity)) + else java.util.Arrays.fill(table, null) /** The number of elements in the set */ def size: Int = used diff --git a/compiler/src/dotty/tools/dotc/util/MutableMap.scala b/compiler/src/dotty/tools/dotc/util/MutableMap.scala index ba912a312aea..283e28e7e04f 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableMap.scala +++ 
b/compiler/src/dotty/tools/dotc/util/MutableMap.scala @@ -13,6 +13,10 @@ abstract class MutableMap[Key, Value] extends ReadOnlyMap[Key, Value]: remove(k) this - def clear(): Unit + /** Remove all bindings from this map. + * @param resetToInitial If true, set back to initial configuration, which includes + * reallocating tables. + */ + def clear(resetToInitial: Boolean = true): Unit def getOrElseUpdate(key: Key, value: => Value): Value diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala index 6e3ae7628eb6..9529262fa5ec 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -15,7 +15,11 @@ abstract class MutableSet[T] extends ReadOnlySet[T]: /** Remove element `x` from the set */ def -=(x: T): Unit - def clear(): Unit + /** Remove all elements from this set. + * @param resetToInitial If true, set back to initial configuration, which includes + * reallocating tables. 
+ */ + def clear(resetToInitial: Boolean = true): Unit def ++= (xs: IterableOnce[T]): Unit = xs.iterator.foreach(this += _) diff --git a/compiler/src/dotty/tools/dotc/util/Spans.scala b/compiler/src/dotty/tools/dotc/util/Spans.scala index baf2cfa121b0..ba537e9aec01 100644 --- a/compiler/src/dotty/tools/dotc/util/Spans.scala +++ b/compiler/src/dotty/tools/dotc/util/Spans.scala @@ -182,6 +182,7 @@ object Spans { assert(isSpan) if (this == NoCoord) NoSpan else Span(-1 - encoding) } + override def toString = if isSpan then s"$toSpan" else s"Coord(idx=$toIndex)" } /** An index coordinate */ diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index f04957f26400..e9b72015b202 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -55,15 +55,14 @@ import collection.mutable } def maybeMonitored[T](op: => T)(using Context): T = - if (ctx.settings.YdetailedStats.value && hits.nonEmpty) { + if ctx.settings.YdetailedStats.value then monitored = true try op - finally { - aggregate() - println() - println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n") - hits.clear() - } - } + finally + if hits.nonEmpty then + aggregate() + println() + println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n") + hits.clear() else op } diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala index 3c23b181a041..975826a87a37 100644 --- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -204,7 +204,7 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do linkedListLoop(null, table(bucket)) } - def clear(): Unit = { + def clear(resetToInitial: Boolean): Unit = { table = new Array[Entry[A] | Null](table.size) threshold = computeThreshold count = 0 diff --git 
a/compiler/src/dotty/tools/dotc/util/concurrent.scala b/compiler/src/dotty/tools/dotc/util/concurrent.scala new file mode 100644 index 000000000000..2710aae6c9b0 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/concurrent.scala @@ -0,0 +1,62 @@ +package dotty.tools.dotc.util +import scala.util.{Try, Failure, Success} +import scala.collection.mutable.ArrayBuffer + +object concurrent: + + class NoCompletion extends RuntimeException + + class Future[T](exec: Executor[T]): + private var result: Option[Try[T]] = None + def force: Try[T] = synchronized { + while result.isEmpty && exec.isAlive do wait(1000 /*ms*/) + result.getOrElse(Failure(NoCompletion())) + } + def complete(r: Try[T]): Unit = synchronized { + result = Some(r) + notifyAll() + } + end Future + + class Executor[T] extends Thread: + private type WorkItem = (Future[T], () => T) + + private var allScheduled = false + private val pending = new ArrayBuffer[WorkItem] + + def schedule(op: () => T): Future[T] = synchronized { + assert(!allScheduled) + val f = Future[T](this) + pending += ((f, op)) + notifyAll() + f + } + + def close(): Unit = synchronized { + allScheduled = true + notifyAll() + } + + private def nextPending(): Option[WorkItem] = synchronized { + while pending.isEmpty && !allScheduled do wait(1000 /*ms*/) + if pending.isEmpty then None + else + val item = pending.head + pending.dropInPlace(1) + Some(item) + } + + override def run(): Unit = + while + nextPending() match + case Some((f, op)) => + f.complete(Try(op())) + true + case None => + false + do () + end Executor +end concurrent + + + diff --git a/compiler/src/dotty/tools/dotc/util/lrutest.sc b/compiler/src/dotty/tools/dotc/util/lrutest.sc index 6e6328b248e3..9c811a65a70a 100644 --- a/compiler/src/dotty/tools/dotc/util/lrutest.sc +++ b/compiler/src/dotty/tools/dotc/util/lrutest.sc @@ -15,12 +15,12 @@ object lrutest { cache.last //> res4: Int = 6 cache lookup "hi" //> res5: String = x cache.indices.take(10).toList //> res6: List[Int] = 
List(7, 0, 1, 2, 3, 4, 5, 6, 7, 0) - + for (i <- 1 to 10) { if (cache.lookup(i.toString) == null) cache.enter(i.toString, i.toString) } - + cache.indices.take(10).toList //> res7: List[Int] = List(5, 6, 7, 0, 1, 2, 3, 4, 5, 6) cache //> res8: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(10 -> 10, 9 - //| > 9, 8 -> 8, 7 -> 7, 6 -> 6, 5 -> 5, 4 -> 4, 3 -> 3) @@ -35,6 +35,6 @@ object lrutest { //| > 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4, 3 -> 3) cache.lookup("11") //> res16: String = null cache.enter("11", "!!") - cache //> res17: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(11 -> !!, 5 + cache //> res17: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(11 -> !!, 5 //| -> 5, 10 -> 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4) } \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/util/optional.scala b/compiler/src/dotty/tools/dotc/util/optional.scala new file mode 100644 index 000000000000..cb62315d3c98 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/optional.scala @@ -0,0 +1,19 @@ +package dotty.tools.dotc.util + +import scala.util.boundary + +/** Return type that indicates that the method returns a T or aborts to the enclosing boundary with a `None` */ +type optional[T] = boundary.Label[None.type] ?=> T + +/** A prompt for `Option`, which establishes a boundary which `_.?` on `Option` can return */ +object optional: + inline def apply[T](inline body: optional[T]): Option[T] = + boundary(Some(body)) + + extension [T](r: Option[T]) + inline def ? 
(using label: boundary.Label[None.type]): T = r match + case Some(x) => x + case None => boundary.break(None) + + inline def break()(using label: boundary.Label[None.type]): Nothing = + boundary.break(None) diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index 29bc764dcd7b..f34fe6f40b9c 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -260,8 +260,10 @@ abstract class AbstractFile extends Iterable[AbstractFile] { // a race condition in creating the entry after the failed lookup may throw val path = jpath.resolve(name) - if (isDir) Files.createDirectory(path) - else Files.createFile(path) + try + if (isDir) Files.createDirectory(path) + else Files.createFile(path) + catch case _: FileAlreadyExistsException => () new PlainFile(new File(path)) case lookup => lookup } diff --git a/compiler/src/dotty/tools/io/JDK9Reflectors.java b/compiler/src/dotty/tools/io/JDK9Reflectors.java index 1b0ce5deabab..9816cc03f92a 100644 --- a/compiler/src/dotty/tools/io/JDK9Reflectors.java +++ b/compiler/src/dotty/tools/io/JDK9Reflectors.java @@ -32,7 +32,7 @@ public final class JDK9Reflectors { } // Classes from java.lang.Runtime are not available in JDK 8 so using them explicitly would prevent this file from compiling with JDK 8 - // but these methods are not called in runtime when using this version of JDK + // but these methods are not called in runtime when using this version of JDK public static /*java.lang.Runtime.Version*/ Object runtimeVersionParse(String string) { try { diff --git a/compiler/src/dotty/tools/package.scala b/compiler/src/dotty/tools/package.scala index 79488c70cf6b..f90355b1fa8e 100644 --- a/compiler/src/dotty/tools/package.scala +++ b/compiler/src/dotty/tools/package.scala @@ -1,10 +1,6 @@ package dotty package object tools { - // Ensure this object is already classloaded, since it's only actually used - // when handling stack overflows 
and every operation (including class loading) - // risks failing. - dotty.tools.dotc.core.handleRecursive val ListOfNil: List[Nil.type] = Nil :: Nil @@ -18,7 +14,7 @@ package object tools { * Flow-typing under explicit nulls will automatically insert many necessary * occurrences of uncheckedNN. */ - inline def uncheckedNN: T = x.asInstanceOf[T] + transparent inline def uncheckedNN: T = x.asInstanceOf[T] inline def toOption: Option[T] = if x == null then None else Some(x.asInstanceOf[T]) @@ -42,4 +38,16 @@ package object tools { def unreachable(x: Any = "<< this case was declared unreachable >>"): Nothing = throw new MatchError(x) -} + + transparent inline def assertShort(inline assertion: Boolean, inline message: Any = null): Unit = + if !assertion then + val msg = message + val e = if msg == null then AssertionError() else AssertionError("assertion failed: " + msg) + e.setStackTrace(Array()) + throw e + + // Ensure this object is already classloaded, since it's only actually used + // when handling stack overflows and every operation (including class loading) + // risks failing. 
+ dotty.tools.dotc.core.handleRecursive + } diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala index 9da12ae955d1..8e048d786ae1 100644 --- a/compiler/src/dotty/tools/repl/JLineTerminal.scala +++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala @@ -16,7 +16,7 @@ import org.jline.reader.impl.history.DefaultHistory import org.jline.terminal.TerminalBuilder import org.jline.utils.AttributedString -final class JLineTerminal extends java.io.Closeable { +class JLineTerminal extends java.io.Closeable { // import java.util.logging.{Logger, Level} // Logger.getLogger("org.jline").setLevel(Level.FINEST) @@ -30,7 +30,8 @@ final class JLineTerminal extends java.io.Closeable { private def blue(str: String)(using Context) = if (ctx.settings.color.value != "never") Console.BLUE + str + Console.RESET else str - private def prompt(using Context) = blue("\nscala> ") + protected def promptStr = "scala" + private def prompt(using Context) = blue(s"\n$promptStr> ") private def newLinePrompt(using Context) = blue(" | ") /** Blockingly read line from `System.in` diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index 608ca23c5fec..c647ef302bb9 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -3,18 +3,14 @@ package repl import scala.language.unsafeNulls -import java.lang.{ ClassLoader, ExceptionInInitializerError } -import java.lang.reflect.InvocationTargetException - -import dotc.core.Contexts._ -import dotc.core.Denotations.Denotation -import dotc.core.Flags -import dotc.core.Flags._ -import dotc.core.Symbols.{Symbol, defn} -import dotc.core.StdNames.{nme, str} -import dotc.printing.ReplPrinter -import dotc.reporting.Diagnostic -import dotc.transform.ValueClasses +import dotc.*, core.* +import Contexts.*, Denotations.*, Flags.*, NameOps.*, StdNames.*, Symbols.* +import printing.ReplPrinter +import 
reporting.Diagnostic +import transform.ValueClasses +import util.StackTraceOps.* + +import scala.util.control.NonFatal /** This rendering object uses `ClassLoader`s to accomplish crossing the 4th * wall (i.e. fetching back values from the compiled class files put into a @@ -28,12 +24,10 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): import Rendering._ - private val MaxStringElements: Int = 1000 // no need to mkString billions of elements - - private var myClassLoader: AbstractFileClassLoader = _ - - private var myReplStringOf: Object => String = _ + var myClassLoader: AbstractFileClassLoader = _ + /** (value, maxElements, maxCharacters) => String */ + var myReplStringOf: (Object, Int, Int) => String = _ /** Class loader used to load compiled code */ private[repl] def classLoader()(using Context) = @@ -59,40 +53,50 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): // `ScalaRunTime.replStringOf`. Probe for new API without extraneous newlines. // For old API, try to clean up extraneous newlines by stripping suffix and maybe prefix newline. 
val scalaRuntime = Class.forName("scala.runtime.ScalaRunTime", true, myClassLoader) - val renderer = "stringOf" // was: replStringOf - try { - val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int], classOf[Boolean]) - val truly = java.lang.Boolean.TRUE - - (value: Object) => meth.invoke(null, value, Integer.valueOf(MaxStringElements), truly).asInstanceOf[String] - } catch { - case _: NoSuchMethodException => - val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int]) + val renderer = "stringOf" + def stringOfMaybeTruncated(value: Object, maxElements: Int): String = { + try { + val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int], classOf[Boolean]) + val truly = java.lang.Boolean.TRUE + meth.invoke(null, value, maxElements, truly).asInstanceOf[String] + } catch { + case _: NoSuchMethodException => + val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int]) + meth.invoke(null, value, maxElements).asInstanceOf[String] + } + } - (value: Object) => meth.invoke(null, value, Integer.valueOf(MaxStringElements)).asInstanceOf[String] + (value: Object, maxElements: Int, maxCharacters: Int) => { + // `ScalaRuntime.stringOf` may truncate the output, in which case we want to indicate that fact to the user + // In order to figure out if it did get truncated, we invoke it twice - once with the `maxElements` that we + // want to print, and once without a limit. If the first is shorter, truncation did occur. + val notTruncated = stringOfMaybeTruncated(value, Int.MaxValue) + val maybeTruncatedByElementCount = stringOfMaybeTruncated(value, maxElements) + val maybeTruncated = truncate(maybeTruncatedByElementCount, maxCharacters) + + // our string representation may have been truncated by element and/or character count + // if so, append an info string - but only once + if (notTruncated.length == maybeTruncated.length) maybeTruncated + else s"$maybeTruncated ... 
large output truncated, print value to show all" } + } myClassLoader } - /** Used to elide long output in replStringOf. - * - * TODO: Perhaps implement setting scala.repl.maxprintstring as in Scala 2, but - * then this bug will surface, so perhaps better not? - * https://github.com/scala/bug/issues/12337 - */ - private[repl] def truncate(str: String): String = - val showTruncated = " ... large output truncated, print value to show all" + private[repl] def truncate(str: String, maxPrintCharacters: Int)(using ctx: Context): String = val ncp = str.codePointCount(0, str.length) // to not cut inside code point - if ncp <= MaxStringElements then str - else str.substring(0, str.offsetByCodePoints(0, MaxStringElements - 1)) + showTruncated + if ncp <= maxPrintCharacters then str + else str.substring(0, str.offsetByCodePoints(0, maxPrintCharacters - 1)) /** Return a String representation of a value we got from `classLoader()`. */ private[repl] def replStringOf(value: Object)(using Context): String = assert(myReplStringOf != null, "replStringOf should only be called on values creating using `classLoader()`, but `classLoader()` has not been called so far") - val res = myReplStringOf(value) - if res == null then "null // non-null reference has null-valued toString" else truncate(res) + val maxPrintElements = ctx.settings.VreplMaxPrintElements.valueIn(ctx.settingsState) + val maxPrintCharacters = ctx.settings.VreplMaxPrintCharacters.valueIn(ctx.settingsState) + val res = myReplStringOf(value, maxPrintElements, maxPrintCharacters) + if res == null then "null // non-null reference has null-valued toString" else res /** Load the value of the symbol using reflection. 
* @@ -123,8 +127,7 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): */ private def rewrapValueClass(sym: Symbol, value: Object)(using Context): Option[Object] = if ValueClasses.isDerivedValueClass(sym) then - val valueClassName = sym.flatName.encode.toString - val valueClass = Class.forName(valueClassName, true, classLoader()) + val valueClass = Class.forName(sym.binaryClassName, true, classLoader()) valueClass.getConstructors.headOption.map(_.newInstance(value)) else Some(value) @@ -140,7 +143,7 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): infoDiagnostic(d.symbol.showUser, d) /** Render value definition result */ - def renderVal(d: Denotation)(using Context): Either[InvocationTargetException, Option[Diagnostic]] = + def renderVal(d: Denotation)(using Context): Either[ReflectiveOperationException, Option[Diagnostic]] = val dcl = d.symbol.showUser def msg(s: String) = infoDiagnostic(s, d) try @@ -148,12 +151,11 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): if d.symbol.is(Flags.Lazy) then Some(msg(dcl)) else valueOf(d.symbol).map(value => msg(s"$dcl = $value")) ) - catch case e: InvocationTargetException => Left(e) + catch case e: ReflectiveOperationException => Left(e) end renderVal /** Force module initialization in the absence of members. */ def forceModule(sym: Symbol)(using Context): Seq[Diagnostic] = - import scala.util.control.NonFatal def load() = val objectName = sym.fullName.encode.toString Class.forName(objectName, true, classLoader()) @@ -161,14 +163,11 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): try load() catch case e: ExceptionInInitializerError => List(renderError(e, sym.denot)) - case NonFatal(e) => List(renderError(InvocationTargetException(e), sym.denot)) + case NonFatal(e) => List(renderError(e, sym.denot)) /** Render the stack trace of the underlying exception. 
*/ - def renderError(ite: InvocationTargetException | ExceptionInInitializerError, d: Denotation)(using Context): Diagnostic = - import dotty.tools.dotc.util.StackTraceOps._ - val cause = ite.getCause match - case e: ExceptionInInitializerError => e.getCause - case e => e + def renderError(thr: Throwable, d: Denotation)(using Context): Diagnostic = + val cause = rootCause(thr) // detect //at repl$.rs$line$2$.(rs$line$2:1) //at repl$.rs$line$2.res1(rs$line$2) @@ -182,7 +181,6 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): private def infoDiagnostic(msg: String, d: Denotation)(using Context): Diagnostic = new Diagnostic.Info(msg, d.symbol.sourcePos) - object Rendering: final val REPL_WRAPPER_NAME_PREFIX = str.REPL_SESSION_LINE @@ -192,3 +190,12 @@ object Rendering: val text = printer.dclText(s) text.mkString(ctx.settings.pageWidth.value, ctx.settings.printLines.value) } + + def rootCause(x: Throwable): Throwable = x match + case _: ExceptionInInitializerError | + _: java.lang.reflect.InvocationTargetException | + _: java.lang.reflect.UndeclaredThrowableException | + _: java.util.concurrent.ExecutionException + if x.getCause != null => + rootCause(x.getCause) + case _ => x diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala index 8db288f50aca..764695e8479b 100644 --- a/compiler/src/dotty/tools/repl/ReplCompiler.scala +++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala @@ -62,8 +62,8 @@ class ReplCompiler extends Compiler: } val rootCtx = super.rootContext.fresh - .setOwner(defn.EmptyPackageClass) .withRootImports + .fresh.setOwner(defn.EmptyPackageClass): Context (state.validObjectIndexes).foldLeft(rootCtx)((ctx, id) => importPreviousRun(id)(using ctx)) } diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 4fab4b119a08..905f4f06de08 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ 
b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -37,6 +37,7 @@ import org.jline.reader._ import scala.annotation.tailrec import scala.collection.mutable import scala.jdk.CollectionConverters._ +import scala.util.control.NonFatal import scala.util.Using /** The state of the REPL contains necessary bindings instead of having to have @@ -118,7 +119,7 @@ class ReplDriver(settings: Array[String], private var rootCtx: Context = _ private var shouldStart: Boolean = _ private var compiler: ReplCompiler = _ - private var rendering: Rendering = _ + protected var rendering: Rendering = _ // initialize the REPL session as part of the constructor so that once `run` // is called, we're in business @@ -138,7 +139,7 @@ class ReplDriver(settings: Array[String], * observable outside of the CLI, for this reason, most helper methods are * `protected final` to facilitate testing. */ - final def runUntilQuit(using initialState: State = initialState)(): State = { + def runUntilQuit(using initialState: State = initialState)(): State = { val terminal = new JLineTerminal out.println( @@ -176,24 +177,44 @@ class ReplDriver(settings: Array[String], interpret(ParseResult.complete(input)) } - private def runBody(body: => State): State = rendering.classLoader()(using rootCtx).asContext(withRedirectedOutput(body)) + final def runQuietly(input: String)(using State): State = runBody { + val parsed = ParseResult(input) + interpret(parsed, quiet = true) + } + + protected def runBody(body: => State): State = rendering.classLoader()(using rootCtx).asContext(withRedirectedOutput(body)) // TODO: i5069 final def bind(name: String, value: Any)(using state: State): State = state + /** + * Controls whether the `System.out` and `System.err` streams are set to the provided constructor parameter instance + * of [[java.io.PrintStream]] during the execution of the repl. On by default. 
+ * + * Disabling this can be beneficial when executing a repl instance inside a concurrent environment, for example a + * thread pool (such as the Scala compile server in the Scala Plugin for IntelliJ IDEA). + * + * In such environments, indepently executing `System.setOut` and `System.setErr` without any synchronization can + * lead to unpredictable results when restoring the original streams (dependent on the order of execution), leaving + * the Java process in an inconsistent state. + */ + protected def redirectOutput: Boolean = true + // redirecting the output allows us to test `println` in scripted tests private def withRedirectedOutput(op: => State): State = { - val savedOut = System.out - val savedErr = System.err - try { - System.setOut(out) - System.setErr(out) - op - } - finally { - System.setOut(savedOut) - System.setErr(savedErr) - } + if redirectOutput then + val savedOut = System.out + val savedErr = System.err + try { + System.setOut(out) + System.setErr(out) + op + } + finally { + System.setOut(savedOut) + System.setErr(savedErr) + } + else op } private def newRun(state: State, reporter: StoreReporter = newStoreReporter) = { @@ -236,16 +257,16 @@ class ReplDriver(settings: Array[String], unit.tpdTree = tree given Context = state.context.fresh.setCompilationUnit(unit) val srcPos = SourcePosition(file, Span(cursor)) - val (_, completions) = Completion.completions(srcPos) + val completions = try Completion.completions(srcPos)._2 catch case NonFatal(_) => Nil completions.map(_.label).distinct.map(makeCandidate) } .getOrElse(Nil) end completions - private def interpret(res: ParseResult)(using state: State): State = { + protected def interpret(res: ParseResult, quiet: Boolean = false)(using state: State): State = { res match { case parsed: Parsed if parsed.trees.nonEmpty => - compile(parsed, state) + compile(parsed, state, quiet) case SyntaxErrors(_, errs, _) => displayErrors(errs) @@ -263,7 +284,7 @@ class ReplDriver(settings: Array[String], } /** 
Compile `parsed` trees and evolve `state` in accordance */ - private def compile(parsed: Parsed, istate: State): State = { + private def compile(parsed: Parsed, istate: State, quiet: Boolean = false): State = { def extractNewestWrapper(tree: untpd.Tree): Name = tree match { case PackageDef(_, (obj: untpd.ModuleDef) :: Nil) => obj.name.moduleClassName case _ => nme.NO_NAME @@ -314,9 +335,11 @@ class ReplDriver(settings: Array[String], given Ordering[Diagnostic] = Ordering[(Int, Int, Int)].on(d => (d.pos.line, -d.level, d.pos.column)) - (definitions ++ warnings) - .sorted - .foreach(printDiagnostic) + if (!quiet) { + (definitions ++ warnings) + .sorted + .foreach(printDiagnostic) + } updatedState } diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala index 3c8c51d8d6b2..9ec0199abcbb 100644 --- a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala +++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala @@ -67,7 +67,7 @@ object ScalaClassLoader { @sharable private[this] val bootClassLoader: ClassLoader = if scala.util.Properties.isJavaAtLeast("9") then try - ClassLoader.getSystemClassLoader.getParent + ClassLoader.getSystemClassLoader.getParent catch case _: Throwable => null else null diff --git a/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala b/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala index b33ba14b9e70..5fac91124187 100644 --- a/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala @@ -20,6 +20,4 @@ final class ExprImpl(val tree: tpd.Tree, val scope: Scope) extends Expr[Any] { } override def hashCode(): Int = tree.hashCode() - - override def toString: String = "'{ ... 
}" } diff --git a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala index d85d92de5455..5477628a30a3 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala @@ -1,7 +1,6 @@ package scala.quoted package runtime.impl - import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Flags.* @@ -9,6 +8,7 @@ import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.util.optional /** Matches a quoted tree against a quoted pattern tree. * A quoted pattern tree may have type and term holes in addition to normal terms. @@ -103,12 +103,13 @@ import dotty.tools.dotc.core.Symbols.* object QuoteMatcher { import tpd.* - // TODO improve performance - // TODO use flag from Context. Maybe -debug or add -debug-macros private inline val debug = false - import Matching._ + /** Sequence of matched expressions. + * These expressions are part of the scrutinee and will be bound to the quote pattern term splices. 
+ */ + type MatchingExprs = Seq[MatchResult] /** A map relating equivalent symbols from the scrutinee and the pattern * For example in @@ -121,32 +122,34 @@ object QuoteMatcher { private def withEnv[T](env: Env)(body: Env ?=> T): T = body(using env) - def treeMatch(scrutineeTerm: Tree, patternTerm: Tree)(using Context): Option[Tuple] = + def treeMatch(scrutineeTree: Tree, patternTree: Tree)(using Context): Option[MatchingExprs] = given Env = Map.empty - scrutineeTerm =?= patternTerm + optional: + scrutineeTree =?= patternTree /** Check that all trees match with `mtch` and concatenate the results with &&& */ - private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => Matching): Matching = (l1, l2) match { + private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => MatchingExprs): optional[MatchingExprs] = (l1, l2) match { case (x :: xs, y :: ys) => mtch(x, y) &&& matchLists(xs, ys)(mtch) case (Nil, Nil) => matched case _ => notMatched } extension (scrutinees: List[Tree]) - private def =?= (patterns: List[Tree])(using Env, Context): Matching = + private def =?= (patterns: List[Tree])(using Env, Context): optional[MatchingExprs] = matchLists(scrutinees, patterns)(_ =?= _) extension (scrutinee0: Tree) /** Check that the trees match and return the contents from the pattern holes. - * Return None if the trees do not match otherwise return Some of a tuple containing all the contents in the holes. + * Return a sequence containing all the contents in the holes. + * If it does not match, continues to the `optional` with `None`. * * @param scrutinee The tree being matched * @param pattern The pattern tree that the scrutinee should match. Contains `patternHole` holes. * @param `summon[Env]` Set of tuples containing pairs of symbols (s, p) where s defines a symbol in `scrutinee` which corresponds to symbol p in `pattern`. - * @return `None` if it did not match or `Some(tup: Tuple)` if it matched where `tup` contains the contents of the holes. 
+ * @return The sequence with the contents of the holes of the matched expression. */ - private def =?= (pattern0: Tree)(using Env, Context): Matching = + private def =?= (pattern0: Tree)(using Env, Context): optional[MatchingExprs] = /* Match block flattening */ // TODO move to cases /** Normalize the tree */ @@ -203,31 +206,12 @@ object QuoteMatcher { // Matches an open term and wraps it into a lambda that provides the free variables case Apply(TypeApply(Ident(_), List(TypeTree())), SeqLiteral(args, _) :: Nil) if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHole) => - def hoasClosure = { - val names: List[TermName] = args.map { - case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName - case arg => arg.symbol.name.asTermName - } - val argTypes = args.map(x => x.tpe.widenTermRefExpr) - val methTpe = MethodType(names)(_ => argTypes, _ => pattern.tpe) - val meth = newAnonFun(ctx.owner, methTpe) - def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { - val argsMap = args.map(_.symbol).zip(lambdaArgss.head).toMap - val body = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = - tree match - case tree: Ident => summon[Env].get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) - case tree => super.transform(tree) - }.transform(scrutinee) - TreeOps(body).changeNonLocalOwners(meth) - } - Closure(meth, bodyFn) - } + val env = summon[Env] val capturedArgs = args.map(_.symbol) - val captureEnv = summon[Env].filter((k, v) => !capturedArgs.contains(v)) + val captureEnv = env.filter((k, v) => !capturedArgs.contains(v)) withEnv(captureEnv) { scrutinee match - case ClosedPatternTerm(scrutinee) => matched(hoasClosure) + case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, args, env) case _ => notMatched } @@ -431,7 +415,6 @@ object QuoteMatcher { case _ => scrutinee val pattern = patternTree.symbol - devirtualizedScrutinee == pattern || summon[Env].get(devirtualizedScrutinee).contains(pattern) || 
devirtualizedScrutinee.allOverriddenSymbols.contains(pattern) @@ -452,32 +435,67 @@ object QuoteMatcher { accumulator.apply(Set.empty, term) } - /** Result of matching a part of an expression */ - private type Matching = Option[Tuple] - - private object Matching { - - def notMatched: Matching = None - - val matched: Matching = Some(Tuple()) - - def matched(tree: Tree)(using Context): Matching = - Some(Tuple1(new ExprImpl(tree, SpliceScope.getCurrent))) - - extension (self: Matching) - def asOptionOfTuple: Option[Tuple] = self - - /** Concatenates the contents of two successful matchings or return a `notMatched` */ - def &&& (that: => Matching): Matching = self match { - case Some(x) => - that match { - case Some(y) => Some(x ++ y) - case _ => None - } - case _ => None - } - end extension - - } + enum MatchResult: + /** Closed pattern extracted value + * @param tree Scrutinee sub-tree that matched + */ + case ClosedTree(tree: Tree) + /** HOAS pattern extracted value + * + * @param tree Scrutinee sub-tree that matched + * @param patternTpe Type of the pattern hole (from the pattern) + * @param args HOAS arguments (from the pattern) + * @param env Mapping between scrutinee and pattern variables + */ + case OpenTree(tree: Tree, patternTpe: Type, args: List[Tree], env: Env) + + /** Return the expression that was extracted from a hole. + * + * If it was a closed expression it returns that expression. Otherwise, + * if it is a HOAS pattern, the surrounding lambda is generated using + * `mapTypeHoles` to create the signature of the lambda. + * + * This expression is assumed to be a valid expression in the given splice scope. 
+ */ + def toExpr(mapTypeHoles: TypeMap, spliceScope: Scope)(using Context): Expr[Any] = this match + case MatchResult.ClosedTree(tree) => + new ExprImpl(tree, spliceScope) + case MatchResult.OpenTree(tree, patternTpe, args, env) => + val names: List[TermName] = args.map { + case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName + case arg => arg.symbol.name.asTermName + } + val paramTypes = args.map(x => mapTypeHoles(x.tpe.widenTermRefExpr)) + val methTpe = MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) + val meth = newAnonFun(ctx.owner, methTpe) + def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { + val argsMap = args.view.map(_.symbol).zip(lambdaArgss.head).toMap + val body = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = + tree match + case tree: Ident => env.get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) + case tree => super.transform(tree) + }.transform(tree) + TreeOps(body).changeNonLocalOwners(meth) + } + val hoasClosure = Closure(meth, bodyFn) + new ExprImpl(hoasClosure, spliceScope) + + private inline def notMatched: optional[MatchingExprs] = + optional.break() + + private inline def matched: MatchingExprs = + Seq.empty + + private inline def matched(tree: Tree)(using Context): MatchingExprs = + Seq(MatchResult.ClosedTree(tree)) + + private def matchedOpen(tree: Tree, patternTpe: Type, args: List[Tree], env: Env)(using Context): MatchingExprs = + Seq(MatchResult.OpenTree(tree, patternTpe, args, env)) + + extension (self: MatchingExprs) + /** Concatenates the contents of two successful matchings */ + def &&& (that: MatchingExprs): MatchingExprs = self ++ that + end extension } diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index f8e439baeb0e..1949304ca287 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ 
-8,15 +8,16 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Annotations import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Types +import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.NameKinds +import dotty.tools.dotc.core.NameOps._ import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.quoted.reflect._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Types import dotty.tools.dotc.NoCompilationUnit - -import dotty.tools.dotc.quoted.{MacroExpansion, PickledQuotes} +import dotty.tools.dotc.quoted.MacroExpansion +import dotty.tools.dotc.quoted.PickledQuotes +import dotty.tools.dotc.quoted.reflect._ import scala.quoted.runtime.{QuoteUnpickler, QuoteMatching} import scala.quoted.runtime.impl.printers._ @@ -242,6 +243,14 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def unapply(cdef: ClassDef): (String, DefDef, List[Tree /* Term | TypeTree */], Option[ValDef], List[Statement]) = val rhs = cdef.rhs.asInstanceOf[tpd.Template] (cdef.name.toString, cdef.constructor, cdef.parents, cdef.self, rhs.body) + + def module(module: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): (ValDef, ClassDef) = { + val cls = module.moduleClass + val clsDef = ClassDef(cls, parents, body) + val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) + val modVal = ValDef(module, Some(newCls)) + (modVal, clsDef) + } end ClassDef given ClassDefMethods: ClassDefMethods with @@ -298,7 +307,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object ValDef extends ValDefModule: def apply(symbol: Symbol, rhs: Option[Term]): ValDef = - tpd.ValDef(symbol.asTerm, xCheckMacroedOwners(xCheckMacroValidExpr(rhs), symbol).getOrElse(tpd.EmptyTree)) + withDefaultPos(tpd.ValDef(symbol.asTerm, xCheckMacroedOwners(xCheckMacroValidExpr(rhs), 
symbol).getOrElse(tpd.EmptyTree))) def copy(original: Tree)(name: String, tpt: TypeTree, rhs: Option[Term]): ValDef = tpd.cpy.ValDef(original)(name.toTermName, tpt, xCheckMacroedOwners(xCheckMacroValidExpr(rhs), original.symbol).getOrElse(tpd.EmptyTree)) def unapply(vdef: ValDef): (String, TypeTree, Option[Term]) = @@ -362,16 +371,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Term extends TermModule: def betaReduce(tree: Term): Option[Term] = tree match - case app @ tpd.Apply(tpd.Select(fn, nme.apply), args) if dotc.core.Symbols.defn.isFunctionType(fn.tpe) => - val app1 = dotc.transform.BetaReduce(app, fn, args) - if app1 eq app then None - else Some(app1.withSpan(tree.span)) case tpd.Block(Nil, expr) => for e <- betaReduce(expr) yield tpd.cpy.Block(tree)(Nil, e) case tpd.Inlined(_, Nil, expr) => betaReduce(expr) case _ => - None + val tree1 = dotc.transform.BetaReduce(tree) + if tree1 eq tree then None + else Some(tree1.withSpan(tree.span)) + end Term given TermMethods: TermMethods with @@ -1474,7 +1482,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Bind extends BindModule: def apply(sym: Symbol, pattern: Tree): Bind = - tpd.Bind(sym, pattern) + withDefaultPos(tpd.Bind(sym, pattern)) def copy(original: Tree)(name: String, pattern: Tree): Bind = withDefaultPos(tpd.cpy.Bind(original)(name.toTermName, pattern)) def unapply(pattern: Bind): (String, Tree) = @@ -1573,8 +1581,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Implicit) def isGiven: Boolean = self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Given) - def isErased: Boolean = - self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Erased) + def isErased: Boolean = false + + def erasedArgs: List[Boolean] = + self.map(_.symbol.is(dotc.core.Flags.Erased)) + def hasErasedArgs: Boolean = + 
self.exists(_.symbol.is(dotc.core.Flags.Erased)) end TermParamClauseMethods type TypeParamClause = List[tpd.TypeDef] @@ -2131,9 +2143,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given MethodTypeMethods: MethodTypeMethods with extension (self: MethodType) - def isErased: Boolean = self.isErasedMethod + def isErased: Boolean = false def isImplicit: Boolean = self.isImplicitMethod def param(idx: Int): TypeRepr = self.newParamRef(idx) + + def erasedParams: List[Boolean] = self.erasedParams + def hasErasedParams: Boolean = self.hasErasedParams end extension end MethodTypeMethods @@ -2159,11 +2174,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end extension end PolyTypeMethods - type TypeLambda = dotc.core.Types.TypeLambda + type TypeLambda = dotc.core.Types.HKTypeLambda object TypeLambdaTypeTest extends TypeTest[TypeRepr, TypeLambda]: def unapply(x: TypeRepr): Option[TypeLambda & x.type] = x match - case tpe: (Types.TypeLambda & x.type) => Some(tpe) + case tpe: (Types.HKTypeLambda & x.type) => Some(tpe) case _ => None end TypeLambdaTypeTest @@ -2395,7 +2410,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Implicits extends ImplicitsModule: def search(tpe: TypeRepr): ImplicitSearchResult = - ctx.typer.inferImplicitArg(tpe, Position.ofMacroExpansion.span) + import tpd.TreeOps + val implicitTree = ctx.typer.inferImplicitArg(tpe, Position.ofMacroExpansion.span) + // Make sure that we do not have any uninstantiated type variables. + // See tests/pos-macros/i16636. + // See tests/pos-macros/exprSummonWithTypeVar with -Xcheck-macros. 
+ dotc.typer.Inferencing.fullyDefinedType(implicitTree.tpe, "", implicitTree) + implicitTree end Implicits type ImplicitSearchResult = Tree @@ -2481,6 +2502,21 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler for sym <- decls(cls) do cls.enter(sym) cls + def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol = + assert(parents.nonEmpty && !parents.head.typeSymbol.is(dotc.core.Flags.Trait), "First parent must be a class") + val mod = dotc.core.Symbols.newNormalizedModuleSymbol( + owner, + name.toTermName, + modFlags | dotc.core.Flags.ModuleValCreationFlags, + clsFlags | dotc.core.Flags.ModuleClassCreationFlags, + parents, + dotc.core.Scopes.newScope, + privateWithin) + val cls = mod.moduleClass.asClass + cls.enter(dotc.core.Symbols.newConstructor(cls, dotc.core.Flags.Synthetic, Nil, Nil)) + for sym <- decls(cls) do cls.enter(sym) + mod + def newMethod(owner: Symbol, name: String, tpe: TypeRepr): Symbol = newMethod(owner, name, tpe, Flags.EmptyFlags, noSymbol) def newMethod(owner: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol = @@ -2490,6 +2526,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def newBind(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol = dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | Case, tpe) def noSymbol: Symbol = dotc.core.Symbols.NoSymbol + + def freshName(prefix: String): String = + NameKinds.MacroNames.fresh(prefix.toTermName).toString end Symbol given SymbolMethods: SymbolMethods with @@ -2513,6 +2552,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def name: String = self.denot.name.toString def fullName: String = self.denot.fullName.toString + def info: TypeRepr = self.denot.info + def pos: Option[Position] = if self.exists then Some(self.sourcePos) else None @@ 
-2619,13 +2660,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler case sym if sym.isType => sym.asType }.toList - def memberType(name: String): Symbol = typeMember(name) + def memberType(name: String): Symbol = + self.typeRef.decls.find(sym => sym.name == name.toTypeName) def typeMember(name: String): Symbol = - self.unforcedDecls.find(sym => sym.name == name.toTypeName) + lookupPrefix.member(name.toTypeName).symbol - def memberTypes: List[Symbol] = typeMembers + def memberTypes: List[Symbol] = + self.typeRef.decls.filter(_.isType) def typeMembers: List[Symbol] = - self.unforcedDecls.filter(_.isType) + lookupPrefix.typeMembers.map(_.symbol).toList def declarations: List[Symbol] = self.typeRef.info.decls.toList @@ -2654,7 +2697,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def show(using printer: Printer[Symbol]): String = printer.show(self) - def asQuotes: Nested = new QuotesImpl(using ctx.withOwner(self)) + def asQuotes: Nested = + assert(self.ownersIterator.contains(ctx.owner), s"$self is not owned by ${ctx.owner}") + new QuotesImpl(using ctx.withOwner(self)) end extension @@ -2729,7 +2774,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def SomeModule: Symbol = dotc.core.Symbols.defn.SomeClass.companionModule def ProductClass: Symbol = dotc.core.Symbols.defn.ProductClass def FunctionClass(arity: Int, isImplicit: Boolean = false, isErased: Boolean = false): Symbol = - dotc.core.Symbols.defn.FunctionSymbol(arity, isImplicit, isErased) + if arity < 0 then throw IllegalArgumentException(s"arity: $arity") + if isErased then + throw new Exception("Erased function classes are not supported. 
Use a refined `scala.runtime.ErasedFunction`") + else dotc.core.Symbols.defn.FunctionSymbol(arity, isImplicit) + def FunctionClass(arity: Int): Symbol = + FunctionClass(arity, false, false) + def FunctionClass(arity: Int, isContextual: Boolean): Symbol = + FunctionClass(arity, isContextual, false) + def ErasedFunctionClass = dotc.core.Symbols.defn.ErasedFunctionClass def TupleClass(arity: Int): Symbol = dotc.core.Symbols.defn.TupleType(arity).nn.classSymbol.asClass def isTupleClass(sym: Symbol): Boolean = @@ -2765,6 +2818,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def Invisible: Flags = dotc.core.Flags.Invisible def JavaDefined: Flags = dotc.core.Flags.JavaDefined def JavaStatic: Flags = dotc.core.Flags.JavaStatic + def JavaAnnotation: Flags = dotc.core.Flags.JavaAnnotation def Lazy: Flags = dotc.core.Flags.Lazy def Local: Flags = dotc.core.Flags.Local def Macro: Flags = dotc.core.Flags.Macro @@ -2784,7 +2838,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def Scala2x: Flags = dotc.core.Flags.Scala2x def Sealed: Flags = dotc.core.Flags.Sealed def StableRealizable: Flags = dotc.core.Flags.StableRealizable - def Static: Flags = dotc.core.Flags.JavaStatic + @deprecated("Use JavaStatic instead", "3.3.0") def Static: Flags = dotc.core.Flags.JavaStatic def Synthetic: Flags = dotc.core.Flags.Synthetic def Trait: Flags = dotc.core.Flags.Trait def Transparent: Flags = dotc.core.Flags.Transparent @@ -3026,7 +3080,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler lazy val ConstantCode: Printer[Constant] = new Printer[Constant]: def show(const: Constant): String = - const.show(using ctx.fresh.setSetting(ctx.settings.color, "never")) + const.show(using ctx.withoutColors) lazy val ConstantStructure: Printer[Constant] = new Printer[Constant]: def show(const: Constant): String = @@ -3053,14 +3107,14 @@ class QuotesImpl private (using val ctx: Context) extends 
Quotes, QuoteUnpickler new TypeImpl(tree, SpliceScope.getCurrent).asInstanceOf[scala.quoted.Type[T]] object ExprMatch extends ExprMatchModule: - def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: scala.quoted.Expr[Any])(using pattern: scala.quoted.Expr[Any]): Option[Tup] = + def unapply[TypeBindings, Tup <: Tuple](scrutinee: scala.quoted.Expr[Any])(using pattern: scala.quoted.Expr[Any]): Option[Tup] = val scrutineeTree = reflect.asTerm(scrutinee) val patternTree = reflect.asTerm(pattern) treeMatch(scrutineeTree, patternTree).asInstanceOf[Option[Tup]] end ExprMatch object TypeMatch extends TypeMatchModule: - def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: scala.quoted.Type[?])(using pattern: scala.quoted.Type[?]): Option[Tup] = + def unapply[TypeBindings, Tup <: Tuple](scrutinee: scala.quoted.Type[?])(using pattern: scala.quoted.Type[?]): Option[Tup] = val scrutineeTree = reflect.TypeTree.of(using scrutinee) val patternTree = reflect.TypeTree.of(using pattern) treeMatch(scrutineeTree, patternTree).asInstanceOf[Option[Tup]] @@ -3090,23 +3144,30 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler if typeHoles.isEmpty then ctx else val ctx1 = ctx.fresh.setFreshGADTBounds.addMode(dotc.core.Mode.GadtConstraintInference) - ctx1.gadt.addToConstraint(typeHoles) + ctx1.gadtState.addToConstraint(typeHoles) ctx1 - val matchings = QuoteMatcher.treeMatch(scrutinee, pat1)(using ctx1) - - if typeHoles.isEmpty then matchings - else { - // After matching and doing all subtype checks, we have to approximate all the type bindings - // that we have found, seal them in a quoted.Type and add them to the result - def typeHoleApproximation(sym: Symbol) = - val fromAboveAnnot = sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot) - val fullBounds = ctx1.gadt.fullBounds(sym) - val tp = if fromAboveAnnot then fullBounds.hi else fullBounds.lo - reflect.TypeReprMethods.asType(tp) - matchings.map { tup => - 
Tuple.fromIArray(typeHoles.map(typeHoleApproximation).toArray.asInstanceOf[IArray[Object]]) ++ tup + // After matching and doing all subtype checks, we have to approximate all the type bindings + // that we have found, seal them in a quoted.Type and add them to the result + def typeHoleApproximation(sym: Symbol) = + val fromAboveAnnot = sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot) + val fullBounds = ctx1.gadt.fullBounds(sym) + if fromAboveAnnot then fullBounds.hi else fullBounds.lo + + QuoteMatcher.treeMatch(scrutinee, pat1)(using ctx1).map { matchings => + import QuoteMatcher.MatchResult.* + lazy val spliceScope = SpliceScope.getCurrent + val typeHoleApproximations = typeHoles.map(typeHoleApproximation) + val typeHoleMapping = Map(typeHoles.zip(typeHoleApproximations)*) + val typeHoleMap = new Types.TypeMap { + def apply(tp: Types.Type): Types.Type = tp match + case Types.TypeRef(Types.NoPrefix, _) => typeHoleMapping.getOrElse(tp.typeSymbol, tp) + case _ => mapOver(tp) } + val matchedExprs = matchings.map(_.toExpr(typeHoleMap, spliceScope)) + val matchedTypes = typeHoleApproximations.map(reflect.TypeReprMethods.asType) + val results = matchedTypes ++ matchedExprs + Tuple.fromIArray(IArray.unsafeFromArray(results.toArray)) } } diff --git a/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala b/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala index 36da30e112c8..d4cea83efde8 100644 --- a/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala @@ -14,6 +14,4 @@ final class TypeImpl(val typeTree: tpd.Tree, val scope: Scope) extends Type[?] 
{ } override def hashCode(): Int = typeTree.hashCode() - - override def toString: String = "Type.of[...]" } diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index 0bea8f0ab643..c229338ad228 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -57,7 +57,6 @@ object Extractors { if (flags.is(Flags.Scala2x)) flagList += "Flags.Scala2x" if (flags.is(Flags.Sealed)) flagList += "Flags.Sealed" if (flags.is(Flags.StableRealizable)) flagList += "Flags.StableRealizable" - if (flags.is(Flags.Static)) flagList += "Flags.javaStatic" if (flags.is(Flags.Synthetic)) flagList += "Flags.Synthetic" if (flags.is(Flags.Trait)) flagList += "Flags.Trait" if (flags.is(Flags.Transparent)) flagList += "Flags.Transparent" diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index 5d61902fbedd..a6a773adc9ba 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -57,7 +57,6 @@ object SourceCode { if (flags.is(Flags.Scala2x)) flagList += "scala2x" if (flags.is(Flags.Sealed)) flagList += "sealed" if (flags.is(Flags.StableRealizable)) flagList += "stableRealizable" - if (flags.is(Flags.Static)) flagList += "javaStatic" if (flags.is(Flags.Synthetic)) flagList += "synthetic" if (flags.is(Flags.Trait)) flagList += "trait" if (flags.is(Flags.Transparent)) flagList += "transparent" @@ -1346,18 +1345,22 @@ object SourceCode { } private def printBoundsTree(bounds: TypeBoundsTree)(using elideThis: Option[Symbol]): this.type = { - bounds.low match { - case Inferred() => - case low => - this += " >: " - printTypeTree(low) - } - bounds.hi match { - case Inferred() => this - case hi => - this += " <: " - printTypeTree(hi) - } + if 
bounds.low.tpe == bounds.hi.tpe then + this += " = " + printTypeTree(bounds.low) + else + bounds.low match { + case Inferred() => + case low => + this += " >: " + printTypeTree(low) + } + bounds.hi match { + case Inferred() => this + case hi => + this += " <: " + printTypeTree(hi) + } } private def printBounds(bounds: TypeBounds)(using elideThis: Option[Symbol]): this.type = { diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index 979fea0684b2..8f9a9bd69a50 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -148,11 +148,11 @@ class CoursierScalaTests: object CoursierScalaTests: - def execCmd(command: String, options: String*): List[String] = + def execCmd(command: String, options: String*): (Int, List[String]) = val cmd = (command :: options.toList).toSeq.mkString(" ") val out = new ListBuffer[String] - cmd.!(ProcessLogger(out += _, out += _)) - out.toList + val code = cmd.!(ProcessLogger(out += _, out += _)) + (code, out.toList) def csScalaCmd(options: String*): List[String] = csCmd("dotty.tools.MainGenericRunner", options*) @@ -166,10 +166,20 @@ object CoursierScalaTests: case Nil => args case _ => "--" +: args val newJOpts = jOpts.map(s => s"--java-opt ${s.stripPrefix("-J")}").mkString(" ") - execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true"""" +: newOptions)*) + execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true"""" +: newOptions)*)._2 /** Get coursier script */ @BeforeClass def setup(): Unit = - val ver = execCmd("uname").head.replace('L', 'l').replace('D', 'd') - execCmd("curl", s"-fLo cs 
https://git.io/coursier-cli-$ver") #&& execCmd("chmod", "+x cs") - + val launcherLocation = "https://github.com/coursier/launchers/raw/master" + val launcherName = execCmd("uname")._2.head.toLowerCase match + case "linux" => "cs-x86_64-pc-linux" + case "darwin" => "cs-x86_64-apple-darwin" + case other => fail(s"Unsupported OS for coursier launcher: $other") + + def runAndCheckCmd(cmd: String, options: String*): Unit = + val (code, out) = execCmd(cmd, options*) + if code != 0 then + fail(s"Failed to run $cmd ${options.mkString(" ")}, exit code: $code, output: ${out.mkString("\n")}") + + runAndCheckCmd("curl", s"-fLo cs $launcherLocation/$launcherName") + runAndCheckCmd("chmod", "+x cs") diff --git a/compiler/test-resources/repl-macros/i15104a b/compiler/test-resources/repl-macros/i15104a new file mode 100644 index 000000000000..92e82928b509 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104a @@ -0,0 +1,7 @@ +scala> import scala.quoted._ +scala> object Foo { def macroImpl(using Quotes) = Expr(1) } +// defined object Foo +scala> inline def foo = ${ Foo.macroImpl } +def foo: Int +scala> foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i15104b b/compiler/test-resources/repl-macros/i15104b new file mode 100644 index 000000000000..ebbdb2402076 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104b @@ -0,0 +1,5 @@ +scala> import scala.quoted._ +scala> object Foo { def macroImpl(using Quotes) = Expr(1); inline def foo = ${ Foo.macroImpl } } +// defined object Foo +scala> Foo.foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i15104c b/compiler/test-resources/repl-macros/i15104c new file mode 100644 index 000000000000..482b9487c9d9 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104c @@ -0,0 +1,7 @@ +scala> import scala.quoted._ +scala> def macroImpl(using Quotes) = Expr(1) +def macroImpl(using x$1: quoted.Quotes): quoted.Expr[Int] +scala> inline def foo = ${ macroImpl } +def foo: Int +scala> 
foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i5551 b/compiler/test-resources/repl-macros/i5551 index fb039ed19dd6..d71251e9a824 100644 --- a/compiler/test-resources/repl-macros/i5551 +++ b/compiler/test-resources/repl-macros/i5551 @@ -1,8 +1,7 @@ scala> import scala.quoted._ scala> def assertImpl(expr: Expr[Boolean])(using q: Quotes) = '{ if !($expr) then throw new AssertionError("failed assertion")} def assertImpl - (expr: quoted.Expr[Boolean]) - (using q: quoted.Quotes): quoted.Expr[Unit] + (expr: quoted.Expr[Boolean])(using q: quoted.Quotes): quoted.Expr[Unit] scala> inline def assert(expr: => Boolean): Unit = ${ assertImpl('{expr}) } def assert(expr: => Boolean): Unit diff --git a/compiler/test-resources/repl/i11377 b/compiler/test-resources/repl/i11377 deleted file mode 100644 index 4e971fb89749..000000000000 --- a/compiler/test-resources/repl/i11377 +++ /dev/null @@ -1,14 +0,0 @@ -scala> val smallArray = Array.fill(100)(0) -val smallArray: Array[Int] = Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) -scala> val bigArray = Array.fill(10000)(0) -val bigArray: Array[Int] = Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ... large output truncated, print value to show all -scala> val notTruncated = "!" * 999 -val notTruncated: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -scala> val onTruncationLimit = "!" 
* 1000 -val onTruncationLimit: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -scala> val onTruncationLimitPlus = "!" 
* 1001 -val onTruncationLimitPlus: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ... large output truncated, print value to show all -scala> val veryBigTruncated = "!" 
* 10000 -val veryBigTruncated: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ... 
large output truncated, print value to show all -scala> val beh = "\u08A0"*10000 -val beh: String = ࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠ ... large output truncated, print value to show all diff --git a/compiler/test-resources/repl/i1370 b/compiler/test-resources/repl/i1370 index 6582e03b6539..4bd92b4d5f83 100644 --- a/compiler/test-resources/repl/i1370 +++ b/compiler/test-resources/repl/i1370 @@ -1,5 +1,5 @@ scala> object Lives { class Private { def foo1: Any = new Private.C1; def foo2: Any = new Private.C2 }; object Private { class C1 private {}; private class C2 {} } } --- Error: ---------------------------------------------------------------------- +-- [E173] Reference Error: ----------------------------------------------------- 1 | object Lives { class Private { def foo1: Any = new Private.C1; def foo2: Any = new Private.C2 }; object Private { class C1 private {}; private class C2 {} } } | ^^^^^^^^^^ |constructor C1 cannot be accessed as a member of Lives.Private.C1 from class Private. 
diff --git a/compiler/test-resources/repl/i15493 b/compiler/test-resources/repl/i15493 index f543f5c1d0f7..670cf8ebcbd2 100644 --- a/compiler/test-resources/repl/i15493 +++ b/compiler/test-resources/repl/i15493 @@ -142,3 +142,8 @@ val res33: Outer.Foo = Outer$Foo@17 scala> res33.toString val res34: String = Outer$Foo@17 +scala> Vector.unapplySeq(Vector(2)) +val res35: scala.collection.SeqFactory.UnapplySeqWrapper[Int] = scala.collection.SeqFactory$UnapplySeqWrapper@df507bfd + +scala> new scala.concurrent.duration.DurationInt(5) +val res36: scala.concurrent.duration.package.DurationInt = scala.concurrent.duration.package$DurationInt@5 diff --git a/compiler/test-resources/repl/i4184 b/compiler/test-resources/repl/i4184 index 2c4eb7d12a6f..06b2c81ece21 100644 --- a/compiler/test-resources/repl/i4184 +++ b/compiler/test-resources/repl/i4184 @@ -5,8 +5,11 @@ scala> object bar { class Foo } scala> implicit def eqFoo: CanEqual[foo.Foo, foo.Foo] = CanEqual.derived def eqFoo: CanEqual[foo.Foo, foo.Foo] scala> object Bar { new foo.Foo == new bar.Foo } --- Error: ---------------------------------------------------------------------- +-- [E172] Type Error: ---------------------------------------------------------- 1 | object Bar { new foo.Foo == new bar.Foo } | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Values of types foo.Foo and bar.Foo cannot be compared with == or != -1 error found + | Values of types foo.Foo and bar.Foo² cannot be compared with == or != + | + | where: Foo is a class in object foo + | Foo² is a class in object bar +1 error found \ No newline at end of file diff --git a/compiler/test-resources/repl/i7644 b/compiler/test-resources/repl/i7644 index 8ceaf8b00804..786823073470 100644 --- a/compiler/test-resources/repl/i7644 +++ b/compiler/test-resources/repl/i7644 @@ -5,11 +5,7 @@ scala> class T extends CanEqual | Cannot extend sealed trait CanEqual in a different source file | | longer explanation available when compiling with `-explain` --- [E056] Syntax Error: 
-------------------------------------------------------- -1 | class T extends CanEqual - | ^^^^^^^^ - | Missing type parameter for CanEqual -2 errors found +1 error found scala> class T extends CanEqual -- [E112] Syntax Error: -------------------------------------------------------- 1 | class T extends CanEqual @@ -17,8 +13,5 @@ scala> class T extends CanEqual | Cannot extend sealed trait CanEqual in a different source file | | longer explanation available when compiling with `-explain` --- [E056] Syntax Error: -------------------------------------------------------- -1 | class T extends CanEqual - | ^^^^^^^^ - | Missing type parameter for CanEqual -2 errors found +1 error found + diff --git a/compiler/test-resources/repl/settings-repl-disable-display b/compiler/test-resources/repl/settings-repl-disable-display new file mode 100644 index 000000000000..ba2c1c64574b --- /dev/null +++ b/compiler/test-resources/repl/settings-repl-disable-display @@ -0,0 +1,12 @@ +scala> 1 +val res0: Int = 1 + +scala>:settings -Xrepl-disable-display + +scala> 2 + +scala>:reset +Resetting REPL state. + +scala> 3 +val res0: Int = 3 \ No newline at end of file diff --git a/compiler/test-resources/repl/settings-repl-max-print-both-truncation-settings b/compiler/test-resources/repl/settings-repl-max-print-both-truncation-settings new file mode 100644 index 000000000000..a7f7d6c10dd6 --- /dev/null +++ b/compiler/test-resources/repl/settings-repl-max-print-both-truncation-settings @@ -0,0 +1,10 @@ +scala> Seq(1,2,3) +val res0: Seq[Int] = List(1, 2, 3) + +scala>:settings -Vrepl-max-print-elements:2 + +scala>:settings -Vrepl-max-print-characters:50 + +scala> Seq(1,2,3) +val res1: Seq[Int] = List(1, 2) ... 
large output truncated, print value to show all + diff --git a/compiler/test-resources/repl/settings-repl-max-print-characters b/compiler/test-resources/repl/settings-repl-max-print-characters new file mode 100644 index 000000000000..9263680b95cc --- /dev/null +++ b/compiler/test-resources/repl/settings-repl-max-print-characters @@ -0,0 +1,7 @@ +scala> 1.to(10).mkString +val res0: String = 12345678910 + +scala>:settings -Vrepl-max-print-characters:10 + +scala> 1.to(10).mkString +val res1: String = 123456789 ... large output truncated, print value to show all diff --git a/compiler/test-resources/repl/settings-repl-max-print-elements b/compiler/test-resources/repl/settings-repl-max-print-elements new file mode 100644 index 000000000000..b203e689f020 --- /dev/null +++ b/compiler/test-resources/repl/settings-repl-max-print-elements @@ -0,0 +1,7 @@ +scala> 1.to(200).toList +val res0: List[Int] = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200) + +scala>:settings -Vrepl-max-print-elements:20 + +scala> 1.to(300).toList +val res1: List[Int] = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20) ... 
large output truncated, print value to show all diff --git a/compiler/test-resources/type-printer/source-compatible b/compiler/test-resources/type-printer/source-compatible new file mode 100644 index 000000000000..d0773a11a795 --- /dev/null +++ b/compiler/test-resources/type-printer/source-compatible @@ -0,0 +1,17 @@ +scala> case class Bag() extends reflect.Selectable +// defined case class Bag +scala> val m = new Bag { val f = 23; def g = 47; def h(i: Int): Int = i; var i = 101; type N = Int; val l = List(42); def p[T](t: T) = t.toString() } +val m: + Bag{ + val f: Int; def g: Int; def h(i: Int): Int; val i: Int; + def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; + def p[T](t: T): String + } = Bag() +scala> type t = Bag { val f: Int; def g: Int; def h(i: Int): Int; val i: Int; def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; val s: String @unchecked } +// defined alias type t + = + Bag{ + val f: Int; def g: Int; def h(i: Int): Int; val i: Int; + def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; + val s: String @unchecked + } diff --git a/compiler/test/dotc/comptest.scala b/compiler/test/dotc/comptest.scala index bd0d800e641c..fb53f561a94d 100644 --- a/compiler/test/dotc/comptest.scala +++ b/compiler/test/dotc/comptest.scala @@ -12,6 +12,7 @@ object comptest extends ParallelTesting { def isInteractive = true def testFilter = Nil def updateCheckFiles: Boolean = false + def failedTests = None val posDir = "./tests/pos/" val negDir = "./tests/neg/" diff --git a/compiler/test/dotc/pos-lazy-vals-tests.allowlist b/compiler/test/dotc/pos-lazy-vals-tests.allowlist new file mode 100644 index 000000000000..21667a9265d7 --- /dev/null +++ b/compiler/test/dotc/pos-lazy-vals-tests.allowlist @@ -0,0 +1,37 @@ +Repeated.scala +byname-implicits-8.scala +existentials.scala +i1235.scala +i13332super.scala +i13349.scala +i13460.scala +i14626.scala +i1753.scala +i4031.scala +i4328.scala +i6450.scala +i6565.scala +i8031.scala +i8111.scala +i8900-unflip.scala 
+lazyvals.scala +singletons.scala +spec-traits.scala +spurious-overload.scala +t1591_pos.scala +t2910.scala +t3411.scala +t3420.scala +t3452f.scala +t3670.scala +t3927.scala +t4432.scala +t4716.scala +t4717.scala +t5099.scala +t5796.scala +t6278-synth-def.scala +t6925b.scala +t7011.scala +t8306.scala +zipped.scala diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 48ec9b7f4766..d1dd83f36ff7 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -2,7 +2,6 @@ i94-nada.scala i1812.scala i1867.scala i3067.scala -t247.scala t2712-5.scala t284-pos.scala t3249 @@ -20,6 +19,8 @@ i12299a.scala i13871.scala i15181.scala i15922.scala +t5031_2.scala +i16997.scala # Tree is huge and blows stack for printing Text i7034.scala @@ -46,6 +47,7 @@ i9999.scala i6505.scala i15158.scala i15155.scala +i15827.scala # Opaque type i5720.scala @@ -79,6 +81,12 @@ i2797a # allows to simplify a type that was already computed i13842.scala +# Position change under captureChecking +boxmap-paper.scala + +# Function types print differnt after unpickling since test mispredicts Feature.preFundsEnabled +caps-universal.scala + # GADT cast applied to singleton type difference i4176-gadt.scala diff --git a/compiler/test/dotc/run-lazy-vals-tests.allowlist b/compiler/test/dotc/run-lazy-vals-tests.allowlist new file mode 100644 index 000000000000..361795bcc5fd --- /dev/null +++ b/compiler/test/dotc/run-lazy-vals-tests.allowlist @@ -0,0 +1,65 @@ +IArrayOps.scala +Lazies1.scala +Lazies2.scala +OrderingTest.scala +anon-mirror-gen-local.scala +byname-implicits-28.scala +byname-implicits-30.scala +byname-implicits-5.scala +exports.scala +i13146.scala +i13146a.scala +i13332a.scala +i13332shapeless.scala +i13358.scala +i1692.scala +i1692b.scala +i1856.scala +i2266.scala +i2275.scala +i4451.scala +i4559.scala +i5340.scala +i5350.scala +i7675.scala +i9473.scala +isInstanceOf-eval.scala 
+lazy-exprs.scala +lazy-impl.scala +lazy-implicit-lists.scala +lazy-override-run.scala +lazy-traits.scala +lazyVals.scala +lazyVals_c3.0.0.scala +lazyVals_c3.1.0.scala +nonLocalReturns.scala +nothing-lazy-val.scala +null-lazy-val.scala +patmatch-classtag.scala +priorityQueue.scala +serialization-new-legacy.scala +singletons.scala +statics.scala +stream_flatmap_odds.scala +t1535.scala +t1591.scala +t2333.scala +t3038.scala +t3670.scala +t3699.scala +t3877.scala +t3895.scala +t3980.scala +t429.scala +t5552.scala +t5610a.scala +t603.scala +t6272.scala +t6443-by-name.scala +t6443-varargs.scala +t704.scala +t7406.scala +t8245.scala +unapply.scala +unit-lazy-val.scala +view-iterator-stream.scala diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala index f4e0ed5f615f..cc47303d5468 100644 --- a/compiler/test/dotty/Properties.scala +++ b/compiler/test/dotty/Properties.scala @@ -13,6 +13,10 @@ object Properties { prop == null || prop == "TRUE" } + /** If property is unset or FALSE we consider it `false` */ + private def propIsTrue(name: String): Boolean = + sys.props.getOrElse(name, "FALSE") == "TRUE" + /** Are we running on the CI? 
*/ val isRunByCI: Boolean = sys.env.isDefinedAt("DOTTY_CI_RUN") || sys.env.isDefinedAt("DRONE") // TODO remove this when we drop Drone @@ -30,9 +34,11 @@ object Properties { */ val testsFilter: List[String] = sys.props.get("dotty.tests.filter").fold(Nil)(_.split(',').toList) + /** Run only failed tests */ + val rerunFailed: Boolean = propIsTrue("dotty.tests.rerunFailed") + /** Tests should override the checkfiles with the current output */ - val testsUpdateCheckfile: Boolean = - sys.props.getOrElse("dotty.tests.updateCheckfiles", "FALSE") == "TRUE" + val testsUpdateCheckfile: Boolean = propIsTrue("dotty.tests.updateCheckfiles") /** When set, the run tests are only compiled - not run, a warning will be * issued @@ -85,6 +91,9 @@ object Properties { /** jline-reader jar */ def jlineReader: String = sys.props("dotty.tests.classes.jlineReader") + /** scalajs-javalib jar */ + def scalaJSJavalib: String = sys.props("dotty.tests.classes.scalaJSJavalib") + /** scalajs-library jar */ def scalaJSLibrary: String = sys.props("dotty.tests.classes.scalaJSLibrary") } diff --git a/compiler/test/dotty/tools/AnnotationsTests.scala b/compiler/test/dotty/tools/AnnotationsTests.scala index 59e9f3129294..3998bf7c93c0 100644 --- a/compiler/test/dotty/tools/AnnotationsTests.scala +++ b/compiler/test/dotty/tools/AnnotationsTests.scala @@ -89,3 +89,9 @@ class AnnotationsTest: s"A missing annotation while parsing a Java class should be silently ignored but: ${ctx.reporter.summary}") } } + + @Test def hasNativeAnnot: Unit = + inCompilerContext(TestConfiguration.basicClasspath) { + val term: TermSymbol = requiredClass("java.lang.invoke.MethodHandle").requiredMethod("invokeExact") + assert(term.hasAnnotation(defn.NativeAnnot), i"${term.annotations}") + } diff --git a/compiler/test/dotty/tools/TestSources.scala b/compiler/test/dotty/tools/TestSources.scala index c4d36b16c90b..6961a61b69b6 100644 --- a/compiler/test/dotty/tools/TestSources.scala +++ b/compiler/test/dotty/tools/TestSources.scala 
@@ -13,21 +13,26 @@ object TestSources { def posFromTastyBlacklistFile: String = "compiler/test/dotc/pos-from-tasty.blacklist" def posTestPicklingBlacklistFile: String = "compiler/test/dotc/pos-test-pickling.blacklist" - def posTestRecheckExcludesFile = "compiler/test/dotc/pos-test-recheck.excludes" + def posTestRecheckExcludesFile: String = "compiler/test/dotc/pos-test-recheck.excludes" + def posLazyValsAllowlistFile: String = "compiler/test/dotc/pos-lazy-vals-tests.allowlist" def posFromTastyBlacklisted: List[String] = loadList(posFromTastyBlacklistFile) def posTestPicklingBlacklisted: List[String] = loadList(posTestPicklingBlacklistFile) - def posTestRecheckExcluded = loadList(posTestRecheckExcludesFile) + def posTestRecheckExcluded: List[String] = loadList(posTestRecheckExcludesFile) + def posLazyValsAllowlist: List[String] = loadList(posLazyValsAllowlistFile) // run tests lists def runFromTastyBlacklistFile: String = "compiler/test/dotc/run-from-tasty.blacklist" def runTestPicklingBlacklistFile: String = "compiler/test/dotc/run-test-pickling.blacklist" - def runTestRecheckExcludesFile = "compiler/test/dotc/run-test-recheck.excludes" + def runTestRecheckExcludesFile: String = "compiler/test/dotc/run-test-recheck.excludes" + def runLazyValsAllowlistFile: String = "compiler/test/dotc/run-lazy-vals-tests.allowlist" + def runFromTastyBlacklisted: List[String] = loadList(runFromTastyBlacklistFile) def runTestPicklingBlacklisted: List[String] = loadList(runTestPicklingBlacklistFile) - def runTestRecheckExcluded = loadList(runTestRecheckExcludesFile) + def runTestRecheckExcluded: List[String] = loadList(runTestRecheckExcludesFile) + def runLazyValsAllowlist: List[String] = loadList(runLazyValsAllowlistFile) // load lists diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index 2c618ea91e96..ac4ba3ee0e75 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala 
+++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -597,7 +597,7 @@ class DottyBytecodeTests extends DottyBytecodeTest { val clsIn = dir.lookupName("Test.class", directory = false).input val clsNode = loadClassNode(clsIn) val method = getMethod(clsNode, "test") - assertEquals(88, instructionsFromMethod(method).size) + assertEquals(23, instructionsFromMethod(method).size) } } @@ -1622,7 +1622,6 @@ class DottyBytecodeTests extends DottyBytecodeTest { val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) val expected = List( - LineNumber(2, Label(0)), LineNumber(3, Label(0)), LineNumber(4, Label(5)), // case y => LineNumber(5, Label(9)), @@ -1664,7 +1663,6 @@ class DottyBytecodeTests extends DottyBytecodeTest { val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) val expected = List( - LineNumber(2, Label(0)), LineNumber(3, Label(0)), LineNumber(4, Label(5)), // case a if a == 3 => LineNumber(5, Label(15)), diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala index ea9009de1d9e..6173842e9ad1 100644 --- a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala @@ -163,28 +163,27 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(6, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(8, Label(6)), + Label(5), + LineNumber(8, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(3, Label(11)), + Label(10), + LineNumber(3, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(16), - LineNumber(10, 
Label(16)), + Label(15), + LineNumber(10, Label(15)), VarOp(ALOAD, 0), Ldc(LDC, "inner"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(22) + Label(21) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -228,23 +227,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(12, Label(0)), LineNumber(7, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(3, Label(6)), + Label(5), + LineNumber(3, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(14, Label(11)), + Label(10), + LineNumber(14, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -288,23 +286,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(12, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(8, Label(6)), + Label(5), + LineNumber(8, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "fgh"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(14, Label(11)), + Label(10), + LineNumber(14, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -349,23 +346,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { 
val expected = List( Label(0), - LineNumber(13, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(3, Label(6)), + Label(5), + LineNumber(3, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(15, Label(11)), + Label(10), + LineNumber(15, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -582,6 +578,63 @@ class InlineBytecodeTests extends DottyBytecodeTest { } } + @Test def beta_reduce_polymorphic_function = { + val source = """class Test: + | def test = + | ([Z] => (arg: Z) => { val a: Z = arg; a }).apply[Int](2) + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = + List( + Op(ICONST_2), + VarOp(ISTORE, 1), + VarOp(ILOAD, 1), + Op(IRETURN) + ) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + + @Test def beta_reduce_function_of_opaque_types = { + val source = """object foo: + | opaque type T = Int + | inline def apply(inline op: T => T): T = op(2) + | + |class Test: + | def test = foo { n => n } + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = + List( + Field(GETSTATIC, "foo$", "MODULE$", "Lfoo$;"), + VarOp(ASTORE, 1), + 
VarOp(ALOAD, 1), + VarOp(ASTORE, 2), + Op(ICONST_2), + Op(IRETURN), + ) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + @Test def i9456 = { val source = """class Foo { | def test: Int = inline2(inline1(2.+)) @@ -600,13 +653,7 @@ class InlineBytecodeTests extends DottyBytecodeTest { val instructions = instructionsFromMethod(fun) val expected = // TODO room for constant folding List( - Op(ICONST_1), - VarOp(ISTORE, 1), - Op(ICONST_2), - VarOp(ILOAD, 1), - Op(IADD), - Op(ICONST_3), - Op(IADD), + IntOp(BIPUSH, 6), Op(IRETURN), ) assert(instructions == expected, diff --git a/compiler/test/dotty/tools/backend/jvm/LabelBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/LabelBytecodeTests.scala new file mode 100644 index 000000000000..aea567b87f91 --- /dev/null +++ b/compiler/test/dotty/tools/backend/jvm/LabelBytecodeTests.scala @@ -0,0 +1,166 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import org.junit.Assert._ +import org.junit.Test + +import scala.tools.asm +import asm._ +import asm.tree._ + +import scala.tools.asm.Opcodes +import scala.jdk.CollectionConverters._ +import Opcodes._ + +class LabelBytecodeTests extends DottyBytecodeTest { + import ASMConverters._ + + @Test def localLabelBreak = { + testLabelBytecodeEquals( + """val local = boundary.Label[Long]() + |try break(5L)(using local) + |catch case ex: boundary.Break[Long] @unchecked => + | if ex.label eq local then ex.value + | else throw ex + """.stripMargin, + "Long", + Ldc(LDC, 5), + Op(LRETURN) + ) + } + + @Test def simpleBoundaryBreak = { + testLabelBytecodeEquals( + """boundary: l ?=> + | break(2)(using l) + """.stripMargin, + "Int", + Op(ICONST_2), + Op(IRETURN) + ) + + testLabelBytecodeEquals( + """boundary: + | break(3) + """.stripMargin, + "Int", + Op(ICONST_3), + Op(IRETURN) + ) + + testLabelBytecodeEquals( + """boundary: + | break() + """.stripMargin, + "Unit", + 
Op(RETURN) + ) + } + + @Test def labelExtraction = { + // Test extra Inlined around the label + testLabelBytecodeEquals( + """boundary: + | break(2)(using summon[boundary.Label[Int]]) + """.stripMargin, + "Int", + Op(ICONST_2), + Op(IRETURN) + ) + + // Test extra Block around the label + testLabelBytecodeEquals( + """boundary: l ?=> + | break(2)(using { l }) + """.stripMargin, + "Int", + Op(ICONST_2), + Op(IRETURN) + ) + } + + @Test def boundaryLocalBreak = { + testLabelBytecodeExpect( + """val x: Boolean = true + |boundary[Unit]: + | var i = 0 + | while true do + | i += 1 + | if i > 10 then break() + """.stripMargin, + "Unit", + !throws(_) + ) + } + + @Test def boundaryNonLocalBreak = { + testLabelBytecodeExpect( + """boundary[Unit]: + | nonLocalBreak() + """.stripMargin, + "Unit", + throws + ) + + testLabelBytecodeExpect( + """boundary[Unit]: + | def f() = break() + | f() + """.stripMargin, + "Unit", + throws + ) + } + + @Test def boundaryLocalAndNonLocalBreak = { + testLabelBytecodeExpect( + """boundary[Unit]: l ?=> + | break() + | nonLocalBreak() + """.stripMargin, + "Unit", + throws + ) + } + + private def throws(instructions: List[Instruction]): Boolean = + instructions.exists { + case Op(ATHROW) => true + case _ => false + } + + private def testLabelBytecodeEquals(code: String, tpe: String, expected: Instruction*): Unit = + checkLabelBytecodeInstructions(code, tpe) { instructions => + val expectedList = expected.toList + assert(instructions == expectedList, + "`test` was not properly generated\n" + diffInstructions(instructions, expectedList)) + } + + private def testLabelBytecodeExpect(code: String, tpe: String, expected: List[Instruction] => Boolean): Unit = + checkLabelBytecodeInstructions(code, tpe) { instructions => + assert(expected(instructions), + "`test` was not properly generated\n" + instructions) + } + + private def checkLabelBytecodeInstructions(code: String, tpe: String)(checkOutput: List[Instruction] => Unit): Unit = { + val source = + 
s"""import scala.util.boundary, boundary.break + |class Test: + | def test: $tpe = { + | ${code.linesIterator.toList.mkString("", "\n ", "")} + | } + | def nonLocalBreak[T](value: T)(using boundary.Label[T]): Nothing = break(value) + | def nonLocalBreak()(using boundary.Label[Unit]): Nothing = break(()) + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + val method = getMethod(clsNode, "test") + + checkOutput(instructionsFromMethod(method)) + } + } + +} diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index cce23cb5c9a6..8c8f0079e868 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -10,6 +10,7 @@ import org.junit.Assume._ import org.junit.experimental.categories.Category import scala.concurrent.duration._ +import reporting.TestReporter import vulpix._ import java.nio.file._ @@ -35,6 +36,12 @@ class BootstrappedOnlyCompilationTests { ).checkCompile() } + @Test def posWithCompilerCC: Unit = + implicit val testGroup: TestGroup = TestGroup("compilePosWithCompilerCC") + aggregateTests( + compileDir("tests/pos-with-compiler-cc/dotc", withCompilerOptions.and("-language:experimental.captureChecking")) + ).checkCompile() + @Test def posWithCompiler: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePosWithCompiler") aggregateTests( @@ -123,6 +130,8 @@ class BootstrappedOnlyCompilationTests { compileFilesInDir("tests/run-custom-args/Yretain-trees", defaultOptions and "-Yretain-trees"), compileFilesInDir("tests/run-custom-args/Yread-comments", defaultOptions and "-Yread-docs"), compileFilesInDir("tests/run-custom-args/run-macros-erased", defaultOptions.and("-language:experimental.erasedDefinitions").and("-Xcheck-macros")), + 
compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")), + compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")), ) }.checkRuns() @@ -214,6 +223,7 @@ object BootstrappedOnlyCompilationTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 8d7a16dad8a4..ed531aa404c2 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -16,6 +16,7 @@ import scala.jdk.CollectionConverters._ import scala.util.matching.Regex import scala.concurrent.duration._ import TestSources.sources +import reporting.TestReporter import vulpix._ class CompilationTests { @@ -40,11 +41,14 @@ class CompilationTests { compileFilesInDir("tests/pos-special/isInstanceOf", allowDeepSubtypes.and("-Xfatal-warnings")), compileFilesInDir("tests/new", defaultOptions.and("-source", "3.2")), // just to see whether 3.2 works compileFilesInDir("tests/pos-scala2", scala2CompatMode), - compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-Ycc")), + compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), compileFilesInDir("tests/pos-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init")), + // Run tests for legacy lazy vals + compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", 
"-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/pos-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), + compileFilesInDir("tests/pos-custom-args/strict", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), compileFile( // succeeds despite -Xfatal-warnings because of -nowarn @@ -52,8 +56,6 @@ class CompilationTests { defaultOptions.and("-nowarn", "-Xfatal-warnings") ), compileFile("tests/pos-special/typeclass-scaling.scala", defaultOptions.and("-Xmax-inlines", "40")), - compileFile("tests/pos-special/i7296.scala", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), - compileDir("tests/pos-special/adhoc-extension", defaultOptions.and("-source", "future", "-feature", "-Xfatal-warnings")), compileFile("tests/pos-special/i7575.scala", defaultOptions.andLanguageFeature("dynamics")), compileFile("tests/pos-special/kind-projector.scala", defaultOptions.and("-Ykind-projector")), compileFile("tests/pos-special/kind-projector-underscores.scala", defaultOptions.and("-Ykind-projector:underscores")), @@ -62,7 +64,6 @@ class CompilationTests { compileFile("tests/pos-custom-args/i9267.scala", defaultOptions.and("-Ystop-after:erasure")), compileFile("tests/pos-special/extend-java-enum.scala", defaultOptions.and("-source", "3.0-migration")), compileFile("tests/pos-custom-args/help.scala", defaultOptions.and("-help", "-V", "-W", "-X", "-Y")), - compileFile("tests/pos-custom-args/i10383.scala", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), compileFile("tests/pos-custom-args/i13044.scala", defaultOptions.and("-Xmax-inlines:33")), compileFile("tests/pos-custom-args/jdk-8-app.scala", defaultOptions.and("-release:8")), ).checkCompile() @@ -139,29 +140,22 @@ class 
CompilationTests { compileFilesInDir("tests/neg-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), compileFilesInDir("tests/neg-custom-args/allow-double-bindings", allowDoubleBindings), compileFilesInDir("tests/neg-custom-args/allow-deep-subtypes", allowDeepSubtypes), + compileFilesInDir("tests/neg-custom-args/feature", defaultOptions.and("-Xfatal-warnings", "-feature")), compileFilesInDir("tests/neg-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), - compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-Ycc")), - compileDir("tests/neg-custom-args/impl-conv", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileDir("tests/neg-custom-args/i13946", defaultOptions.and("-Xfatal-warnings", "-feature")), + compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), + compileFilesInDir("tests/neg-custom-args/explain", defaultOptions.and("-explain")), compileFile("tests/neg-custom-args/avoid-warn-deprecation.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileFile("tests/neg-custom-args/implicit-conversions.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileFile("tests/neg-custom-args/implicit-conversions-old.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), compileFile("tests/neg-custom-args/i3246.scala", scala2CompatMode), compileFile("tests/neg-custom-args/overrideClass.scala", scala2CompatMode), compileFile("tests/neg-custom-args/ovlazy.scala", scala2CompatMode.and("-Xfatal-warnings")), compileFile("tests/neg-custom-args/newline-braces.scala", scala2CompatMode.and("-Xfatal-warnings")), compileFile("tests/neg-custom-args/autoTuplingTest.scala", defaultOptions.andLanguageFeature("noAutoTupling")), - compileFile("tests/neg-custom-args/nopredef.scala", defaultOptions.and("-Yno-predef")), - compileFile("tests/neg-custom-args/noimports.scala", defaultOptions.and("-Yno-imports")), - 
compileFile("tests/neg-custom-args/noimports2.scala", defaultOptions.and("-Yno-imports")), compileFile("tests/neg-custom-args/i1650.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i3882.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i4372.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i1754.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i12650.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i9517.scala", defaultOptions.and("-Xprint-types")), - compileFile("tests/neg-custom-args/i11637.scala", defaultOptions.and("-explain")), - compileFile("tests/neg-custom-args/i15575.scala", defaultOptions.and("-explain")), compileFile("tests/neg-custom-args/interop-polytypes.scala", allowDeepSubtypes.and("-Yexplicit-nulls")), compileFile("tests/neg-custom-args/conditionalWarnings.scala", allowDeepSubtypes.and("-deprecation").and("-Xfatal-warnings")), compileFilesInDir("tests/neg-custom-args/isInstanceOf", allowDeepSubtypes and "-Xfatal-warnings"), @@ -185,9 +179,7 @@ class CompilationTests { compileFile("tests/neg-custom-args/deptypes.scala", defaultOptions.and("-language:experimental.dependent")), compileFile("tests/neg-custom-args/matchable.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), compileFile("tests/neg-custom-args/i7314.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), - compileFile("tests/neg-custom-args/capt-wf.scala", defaultOptions.and("-Ycc", "-Xfatal-warnings")), - compileFile("tests/neg-custom-args/feature-shadowing.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileDir("tests/neg-custom-args/hidden-type-errors", defaultOptions.and("-explain")), + compileFile("tests/neg-custom-args/capt-wf.scala", defaultOptions.and("-language:experimental.captureChecking", "-Xfatal-warnings")), compileFile("tests/neg-custom-args/i13026.scala", defaultOptions.and("-print-lines")), compileFile("tests/neg-custom-args/i13838.scala", 
defaultOptions.and("-Ximplicit-search-limit", "1000")), compileFile("tests/neg-custom-args/jdk-9-app.scala", defaultOptions.and("-release:8")), @@ -211,11 +203,11 @@ class CompilationTests { compileFile("tests/run-custom-args/defaults-serizaliable-no-forwarders.scala", defaultOptions and "-Xmixin-force-forwarders:false"), compileFilesInDir("tests/run-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), compileFilesInDir("tests/run-custom-args/fatal-warnings", defaultOptions.and("-Xfatal-warnings")), - compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")), - compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")), - compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-Ycc")), + compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking")), compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), - compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init")) + compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init")), + // Run tests for legacy lazy vals. + compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), ).checkRuns() } @@ -237,7 +229,8 @@ class CompilationTests { ).checkCompile() } - @Test def recheck: Unit = + //@Test disabled in favor of posWithCompilerCC to save time. 
+ def recheck: Unit = given TestGroup = TestGroup("recheck") aggregateTests( compileFilesInDir("tests/new", recheckOptions), @@ -313,6 +306,7 @@ object CompilationTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/FromTastyTests.scala b/compiler/test/dotty/tools/dotc/FromTastyTests.scala index 2684a47b870c..1d46cbbce95c 100644 --- a/compiler/test/dotty/tools/dotc/FromTastyTests.scala +++ b/compiler/test/dotty/tools/dotc/FromTastyTests.scala @@ -5,6 +5,7 @@ package dotc import scala.language.unsafeNulls import org.junit.{AfterClass, Test} +import reporting.TestReporter import vulpix._ import java.io.{File => JFile} @@ -48,6 +49,7 @@ object FromTastyTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala index 84b3f1f8a48f..b515ebb05f96 100644 --- a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala +++ b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala @@ -12,6 +12,7 @@ import org.junit.{AfterClass, Test} import org.junit.experimental.categories.Category import scala.concurrent.duration._ +import reporting.TestReporter import vulpix._ @@ -76,6 +77,7 @@ object IdempotencyTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = 
Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala index e3076f055d51..8c571a321548 100644 --- a/compiler/test/dotty/tools/dotc/SettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala @@ -179,6 +179,25 @@ class SettingsTests { assertEquals(100, foo.value) } + @Test def `Set BooleanSettings correctly`: Unit = + object Settings extends SettingGroup: + val foo = BooleanSetting("-foo", "foo", false) + val bar = BooleanSetting("-bar", "bar", true) + val baz = BooleanSetting("-baz", "baz", false) + val qux = BooleanSetting("-qux", "qux", false) + import Settings._ + + val args = List("-foo:true", "-bar:false", "-baz", "-qux:true", "-qux:false") + val summary = processArguments(args, processAll = true) + assertTrue(s"Setting args errors:\n ${summary.errors.take(5).mkString("\n ")}", summary.errors.isEmpty) + withProcessedArgs(summary) { + assertEquals(true, foo.value) + assertEquals(false, bar.value) + assertEquals(true, baz.value) + assertEquals(false, qux.value) + assertEquals(List("Flag -qux set repeatedly"), summary.warnings) + } + private def withProcessedArgs(summary: ArgsSummary)(f: SettingsState ?=> Unit) = f(using summary.sstate) extension [T](setting: Setting[T]) diff --git a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala index 7df64ad5bf3f..4dfc08cc7e9b 100644 --- a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala +++ b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala @@ -22,6 +22,7 @@ class StringFormatterTest extends AbstractStringFormatterTest: @Test def flagsSeq = check(", final", i"${Seq(JavaStatic, Final)}%, %") @Test def flagsTup = check("(,final)", i"${(JavaStatic, Final)}") @Test def seqOfTup2 = 
check("(final,given), (private,lazy)", i"${Seq((Final, Given), (Private, Lazy))}%, %") + @Test def seqOfTup3 = check("(Foo,given, (right is approximated))", i"${Seq((Foo, Given, TypeComparer.ApproxState.None.addHigh))}%, %") class StorePrinter extends Printer: var string: String = "" @@ -38,51 +39,11 @@ class StringFormatterTest extends AbstractStringFormatterTest: assertEquals("flags=private final ", store.string) end StringFormatterTest -class EmStringFormatterTest extends AbstractStringFormatterTest: - @Test def seq = check("[Any, String]", em"${Seq(defn.AnyType, defn.StringType)}") - @Test def seqSeq = check("Any; String", em"${Seq(defn.AnyType, defn.StringType)}%; %") - @Test def ellipsis = assert(em"$Big".contains("...")) - @Test def err = check("type Err", em"$Err") - @Test def ambig = check("Foo vs Foo", em"$Foo vs $Foo") - @Test def cstrd = check("Foo; Bar", em"$mkCstrd%; %") - @Test def seqErr = check("[class Any, type Err]", em"${Seq(defn.AnyClass, Err)}") - @Test def seqSeqErr = check("class Any; type Err", em"${Seq(defn.AnyClass, Err)}%; %") - @Test def tupleErr = check("(1,type Err)", em"${(1, Err)}") - @Test def tupleAmb = check("(Foo,Foo)", em"${(Foo, Foo)}") - @Test def tupleFlags = check("(Foo,abstract)", em"${(Foo, Abstract)}") - @Test def seqOfTupleFlags = check("[(Foo,abstract)]", em"${Seq((Foo, Abstract))}") -end EmStringFormatterTest - -class ExStringFormatterTest extends AbstractStringFormatterTest: - @Test def seq = check("[Any, String]", ex"${Seq(defn.AnyType, defn.StringType)}") - @Test def seqSeq = check("Any; String", ex"${Seq(defn.AnyType, defn.StringType)}%; %") - @Test def ellipsis = assert(ex"$Big".contains("...")) - @Test def err = check("type Err", ex"$Err") - @Test def ambig = check("""Foo vs Foo² - | - |where: Foo is a type - | Foo² is a type - |""".stripMargin, ex"$Foo vs $Foo") - @Test def cstrd = check("""Foo; Bar - | - |where: Bar is a type variable with constraint <: String - | Foo is a type variable with constraint <: Int 
- |""".stripMargin, ex"$mkCstrd%; %") - @Test def seqErr = check("[class Any, type Err]", ex"${Seq(defn.AnyClass, Err)}") - @Test def seqSeqErr = check("class Any; type Err", ex"${Seq(defn.AnyClass, Err)}%; %") - @Test def tupleErr = check("(1,type Err)", ex"${(1, Err)}") - @Test def tupleAmb = check("""(Foo,Foo²) - | - |where: Foo is a type - | Foo² is a type - |""".stripMargin, ex"${(Foo, Foo)}") -end ExStringFormatterTest - abstract class AbstractStringFormatterTest extends DottyTest: override def initializeCtx(fc: FreshContext) = super.initializeCtx(fc.setSetting(fc.settings.color, "never")) def Foo = newSymbol(defn.RootClass, typeName("Foo"), EmptyFlags, TypeBounds.empty).typeRef - def Err = newErrorSymbol(defn.RootClass, typeName("Err"), "") + def Err = newErrorSymbol(defn.RootClass, typeName("Err"), "".toMessage) def Big = (1 to 120).foldLeft(defn.StringType)((tp, i) => RefinedType(tp, typeName("A" * 69 + i), TypeAlias(defn.IntType))) def mkCstrd = diff --git a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala index 9e71b10b206d..50e07f388dc4 100644 --- a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala +++ b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala @@ -17,6 +17,7 @@ import scala.util.matching.Regex import scala.concurrent.duration._ import TestSources.sources import vulpix._ +import reporting.TestReporter class TastyBootstrapTests { import ParallelTesting._ @@ -114,6 +115,7 @@ object TastyBootstrapTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/TupleShowTests.scala b/compiler/test/dotty/tools/dotc/TupleShowTests.scala new file mode 100644 
index 000000000000..2d76c480b001 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/TupleShowTests.scala @@ -0,0 +1,96 @@ +package dotty.tools +package dotc + +import core.*, Decorators.*, Symbols.* +import printing.Texts.* + +import java.lang.System.{ lineSeparator => EOL } +import org.junit.Test + +class TupleShowTests extends DottyTest: + def IntType = defn.IntType + def LongType = defn.LongType + def ShortType = defn.ShortType + def Types_10 = List.fill(5)(IntType) ::: List.fill(5)(LongType) + def Types_20 = Types_10 ::: Types_10 + + val tup0 = defn.tupleType(Nil) + val tup1 = defn.tupleType(IntType :: Nil) + val tup2 = defn.tupleType(IntType :: LongType :: Nil) + val tup3 = defn.tupleType(IntType :: LongType :: ShortType :: Nil) + val tup21 = defn.tupleType(Types_20 ::: IntType :: Nil) + val tup22 = defn.tupleType(Types_20 ::: IntType :: LongType :: Nil) + val tup23 = defn.tupleType(Types_20 ::: IntType :: LongType :: ShortType :: Nil) + val tup24 = defn.tupleType(Types_20 ::: IntType :: LongType :: ShortType :: ShortType :: Nil) + + @Test def tup0_show = chkEq("EmptyTuple.type", i"$tup0") + @Test def tup1_show = chkEq("Tuple1[Int]", i"$tup1") + @Test def tup2_show = chkEq("(Int, Long)", i"$tup2") + @Test def tup3_show = chkEq("(Int, Long, Short)", i"$tup3") + @Test def tup21_show = chkEq(res21, i"$tup21") + @Test def tup22_show = chkEq(res22, i"$tup22") + @Test def tup23_show = chkEq(res23, i"$tup23") + @Test def tup24_show = chkEq(res24, i"$tup24") + + @Test def tup3_text = + val obt = tup3.toText(ctx.printer) + val exp = Fluid(List( + Str(")"), + Str("Short"), + Closed(List(Str(", "), Str("Long"))), + Closed(List(Str(", "), Str("Int"))), + Str("("), + )) + chkEq(exp, obt) + + @Test def tup3_layout10 = + val obt = tup3.toText(ctx.printer).layout(10) + val exp = Fluid(List( + Str(" Short)"), + Str(" Long, "), + Str("(Int, "), + )) + chkEq(exp, obt) + + @Test def tup3_show10 = chkEq("(Int,\n Long,\n Short)".normEOL, tup3.toText(ctx.printer).mkString(10, 
false)) + + val res21 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int)""".stripMargin.normEOL + + val res22 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long)""".stripMargin.normEOL + + val res23 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long, Short)""".stripMargin.normEOL + + val res24 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long, Short, Short)""".stripMargin.normEOL + + def chkEq[A](expected: A, obtained: A) = assert(expected == obtained, diff(s"$expected", s"$obtained")) + + /** On Windows the string literal in this test source file will be read with `\n` (b/c of "-encoding UTF8") + * but the compiler will correctly emit \r\n as the line separator. + * So we align the expected result to faithfully compare test results. */ + extension (str: String) def normEOL = if EOL == "\n" then str else str.replace("\n", EOL).nn + + def diff(exp: String, obt: String) = + val min = math.min(exp.length, obt.length) + val pre = + var i = 0 + while i < min && exp(i) == obt(i) do i += 1 + exp.take(i) + val suf = + val max = min - pre.length - 1 + var i = 0 + while i <= max && exp(exp.length - 1 - i) == obt(obt.length - 1 - i) do i += 1 + exp.drop(exp.length - 1) + + import scala.io.AnsiColor.* + val ellip = BLACK + BOLD + "..." 
+ RESET + val compactPre = if pre.length <= 20 then pre else ellip + pre.drop(pre.length - 20) + val compactSuf = if suf.length <= 20 then suf else suf.take(20) + ellip + def extractDiff(s: String) = s.slice(pre.length, s.length - suf.length) + s"""|Comparison Failure: + | expected: $compactPre${CYAN }${extractDiff(exp)}$RESET$compactSuf + | obtained: $compactPre$MAGENTA${extractDiff(obt)}$RESET$compactSuf + |""".stripMargin diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index 05f218059f02..44cf83b521f4 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -6,6 +6,7 @@ import Settings._ import org.junit.Test import org.junit.Assert._ +import core.Decorators.toMessage class ScalaSettingsTests: @@ -72,14 +73,14 @@ class ScalaSettingsTests: val proc = sets.processArguments(sumy, processAll = true, skipped = Nil) val conf = sets.Wconf.valueIn(proc.sstate) val sut = reporting.WConf.fromSettings(conf).getOrElse(???) 
- val msg = NoExplanation("There was a problem!") + val msg = "There was a problem!".toMessage val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition) assertEquals(Action.Silent, sut.action(depr)) val feat = new Diagnostic.FeatureWarning(msg, util.NoSourcePosition) assertEquals(Action.Error, sut.action(feat)) val warn = new Diagnostic.Warning(msg, util.NoSourcePosition) assertEquals(Action.Warning, sut.action(warn)) - val nowr = new Diagnostic.Warning(NoExplanation("This is a problem."), util.NoSourcePosition) + val nowr = new Diagnostic.Warning("This is a problem.".toMessage, util.NoSourcePosition) assertEquals(Action.Silent, sut.action(nowr)) end ScalaSettingsTests diff --git a/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala b/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala index 5ab162b9f05c..9ae3fda8c6b9 100644 --- a/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala +++ b/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala @@ -53,3 +53,41 @@ class ConstraintsTest: i"Merging constraints `?S <: ?T` and `Int <: ?S` should result in `Int <:< ?T`: ${ctx.typerState.constraint}") } end mergeBoundsTransitivity + + @Test def validBoundsInit: Unit = inCompilerContext( + TestConfiguration.basicClasspath, + scalaSources = "trait A { def foo[S >: T <: T | Int, T <: String]: Any }") { + val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 + val List(s, t) = tvars.tpes + + val TypeBounds(lo, hi) = ctx.typerState.constraint.entry(t.asInstanceOf[TypeVar].origin): @unchecked + assert(lo =:= defn.NothingType, i"Unexpected lower bound $lo for $t: ${ctx.typerState.constraint}") + assert(hi =:= defn.StringType, i"Unexpected upper bound $hi for $t: ${ctx.typerState.constraint}") // used to be Any + } + + @Test def validBoundsUnify: Unit = inCompilerContext( + TestConfiguration.basicClasspath, + scalaSources = "trait A { def foo[S >: T <: T | 
Int, T <: String | Int]: Any }") { + val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 + val List(s, t) = tvars.tpes + + s <:< t + + val TypeBounds(lo, hi) = ctx.typerState.constraint.entry(t.asInstanceOf[TypeVar].origin): @unchecked + assert(lo =:= defn.NothingType, i"Unexpected lower bound $lo for $t: ${ctx.typerState.constraint}") + assert(hi =:= (defn.StringType | defn.IntType), i"Unexpected upper bound $hi for $t: ${ctx.typerState.constraint}") + } + + @Test def validBoundsReplace: Unit = inCompilerContext( + TestConfiguration.basicClasspath, + scalaSources = "trait X; trait A { def foo[S <: U | X, T, U]: Any }") { + val tvarTrees = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 + val tvars @ List(s, t, u) = tvarTrees.tpes.asInstanceOf[List[TypeVar]] + s =:= t + t =:= u + + for tvar <- tvars do + val entry = ctx.typerState.constraint.entry(tvar.origin) + assert(!ctx.typerState.constraint.occursAtToplevel(tvar.origin, entry), + i"cyclic bound for ${tvar.origin}: ${entry} in ${ctx.typerState.constraint}") + } diff --git a/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala b/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala new file mode 100644 index 000000000000..acc9d1914bf6 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala @@ -0,0 +1,21 @@ +package dotty.tools +package dotc +package core + +import Contexts.*, Decorators.*, Denotations.*, SymDenotations.*, Symbols.*, Types.* +import printing.Formatting.Show + +import org.junit.Test +import org.junit.Assert.* + +class ShowDecoratorTest extends DottyTest: + import ShowDecoratorTest.* + + @Test def t1 = assertEquals("... 
(cannot display due to FooException boom) ...", Foo().tryToShow) +end ShowDecoratorTest + +object ShowDecoratorTest: + import printing.*, Texts.* + class FooException extends Exception("boom") + case class Foo() extends Showable: + def toText(printer: Printer): Text = throw new FooException diff --git a/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala b/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala index 1a9248c49a82..77e172f61167 100644 --- a/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala +++ b/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala @@ -4,13 +4,13 @@ import org.junit.Test import org.junit.AfterClass import org.junit.Assert.* import org.junit.experimental.categories.Category - import dotty.{BootstrappedOnlyTests, Properties} import dotty.tools.vulpix.* import dotty.tools.vulpix.TestConfiguration.* import dotty.tools.dotc.Main +import dotty.tools.dotc.reporting.TestReporter -import java.nio.file.{Files, FileSystems, Path, Paths, StandardCopyOption} +import java.nio.file.{FileSystems, Files, Path, Paths, StandardCopyOption} import scala.jdk.CollectionConverters.* import scala.util.Properties.userDir import scala.language.unsafeNulls @@ -33,11 +33,12 @@ class CoverageTests: checkCoverageIn(rootSrc.resolve("run"), true) def checkCoverageIn(dir: Path, run: Boolean)(using TestGroup): Unit = - /** Converts \ to / on windows, to make the tests pass without changing the serialization. */ + /** Converts \\ (escaped \) to / on windows, to make the tests pass without changing the serialization. 
*/ def fixWindowsPaths(lines: Buffer[String]): Buffer[String] = val separator = java.io.File.separatorChar - if separator != '/' then - lines.map(_.replace(separator, '/')) + if separator == '\\' then + val escapedSep = "\\\\" + lines.map(_.replace(escapedSep, "/")) else lines end fixWindowsPaths @@ -84,6 +85,7 @@ object CoverageTests extends ParallelTesting: def testFilter = Properties.testsFilter def isInteractive = SummaryReport.isInteractive def updateCheckFiles = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests given summaryReport: SummaryReporting = SummaryReport() @AfterClass def tearDown(): Unit = diff --git a/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala b/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala index a54880326704..bb2797c5d034 100644 --- a/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala +++ b/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala @@ -59,8 +59,8 @@ class DeSugarTest extends ParserTest { cpy.DefDef(tree1)(name, transformParamss(paramss), transform(tpt, Type), transform(tree1.rhs)) case tree1 @ TypeDef(name, rhs) => cpy.TypeDef(tree1)(name, transform(rhs, Type)) - case impl @ Template(constr, parents, self, _) => - cpy.Template(tree1)(transformSub(constr), transform(parents), Nil, transformSub(self), transform(impl.body, Expr)) + case impl @ Template(constr, _, self, _) => + cpy.Template(tree1)(transformSub(constr), transform(impl.parentsOrDerived), Nil, transformSub(self), transform(impl.body, Expr)) case Thicket(trees) => Thicket(flatten(trees mapConserve super.transform)) case tree1 => diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 710ceee0a7c0..2c970e93f573 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -19,20 +19,26 @@ import dotty.tools.io.Directory import scala.io.Source import 
org.junit.Test +import scala.util.Using +import java.io.File class PrintingTest { - def options(phase: String) = - List(s"-Xprint:$phase", "-color:never", "-classpath", TestConfiguration.basicClasspath) + def options(phase: String, flags: List[String]) = + List(s"-Xprint:$phase", "-color:never", "-classpath", TestConfiguration.basicClasspath) ::: flags private def compileFile(path: JPath, phase: String): Boolean = { val baseFilePath = path.toString.stripSuffix(".scala") val checkFilePath = baseFilePath + ".check" + val flagsFilePath = baseFilePath + ".flags" val byteStream = new ByteArrayOutputStream() val reporter = TestReporter.reporter(new PrintStream(byteStream), INFO) + val flags = + if (!(new File(flagsFilePath)).exists) Nil + else Using(Source.fromFile(flagsFilePath, StandardCharsets.UTF_8.name))(_.getLines().toList).get try { - Main.process((path.toString::options(phase)).toArray, reporter, null) + Main.process((path.toString :: options(phase, flags)).toArray, reporter, null) } catch { case e: Throwable => println(s"Compile $path exception:") @@ -40,7 +46,7 @@ class PrintingTest { } val actualLines = byteStream.toString(StandardCharsets.UTF_8.name).linesIterator - FileDiff.checkAndDump(path.toString, actualLines.toIndexedSeq, checkFilePath) + FileDiff.checkAndDumpOrUpdate(path.toString, actualLines.toIndexedSeq, checkFilePath) } def testIn(testsDir: String, phase: String) = @@ -63,4 +69,7 @@ class PrintingTest { @Test def untypedPrinting: Unit = testIn("tests/printing/untyped", "parser") + + @Test + def transformedPrinting: Unit = testIn("tests/printing/transformed", "repeatableAnnotations") } diff --git a/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala b/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala index 2f35ccb35434..2e4b7bf1bb3f 100644 --- a/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala +++ b/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala @@ -12,8 +12,7 @@ class 
SyntaxHighlightingTests extends DottyTest { import SyntaxHighlighting._ private def test(source: String, expected: String): Unit = { - val testCtx = ctx.fresh.setSetting(ctx.settings.color, "always") - val highlighted = SyntaxHighlighting.highlight(source)(using testCtx) + val highlighted = SyntaxHighlighting.highlight(source)(using ctx.withColors) .replace(NoColor, ">") .replace(CommentColor, " JFile, FileOutputStream, StringWriter } +import java.io.{BufferedReader, FileInputStream, FileOutputStream, FileReader, PrintStream, PrintWriter, StringReader, StringWriter, File as JFile} import java.text.SimpleDateFormat import java.util.Date -import core.Decorators._ +import core.Decorators.* import scala.collection.mutable - +import scala.jdk.CollectionConverters.* import util.SourcePosition -import core.Contexts._ -import Diagnostic._ -import interfaces.Diagnostic.{ ERROR, WARNING } +import core.Contexts.* +import Diagnostic.* +import dotty.Properties +import interfaces.Diagnostic.{ERROR, WARNING} + +import scala.io.Codec class TestReporter protected (outWriter: PrintWriter, filePrintln: String => Unit, logLevel: Int) extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with MessageRendering { @@ -32,6 +34,10 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M private var _didCrash = false final def compilerCrashed: Boolean = _didCrash + private var _skip: Boolean = false + final def setSkip(): Unit = _skip = true + final def skipped: Boolean = _skip + protected final def inlineInfo(pos: SourcePosition)(using Context): String = if (pos.exists) { if (pos.outer.exists) @@ -80,17 +86,23 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M } object TestReporter { + private val testLogsDirName: String = "testlogs" + private val failedTestsFileName: String = "last-failed.log" + private val failedTestsFile: JFile = new JFile(s"$testLogsDirName/$failedTestsFileName") + private var outFile: 
JFile = _ private var logWriter: PrintWriter = _ + private var failedTestsWriter: PrintWriter = _ private def initLog() = if (logWriter eq null) { val date = new Date val df0 = new SimpleDateFormat("yyyy-MM-dd") val df1 = new SimpleDateFormat("yyyy-MM-dd-'T'HH-mm-ss") - val folder = s"testlogs/tests-${df0.format(date)}" + val folder = s"$testLogsDirName/tests-${df0.format(date)}" new JFile(folder).mkdirs() outFile = new JFile(s"$folder/tests-${df1.format(date)}.log") logWriter = new PrintWriter(new FileOutputStream(outFile, true)) + failedTestsWriter = new PrintWriter(new FileOutputStream(failedTestsFile, false)) } def logPrintln(str: String) = { @@ -140,4 +152,16 @@ object TestReporter { } rep } + + def lastRunFailedTests: Option[List[String]] = + Option.when( + Properties.rerunFailed && + failedTestsFile.exists() && + failedTestsFile.isFile + )(java.nio.file.Files.readAllLines(failedTestsFile.toPath).asScala.toList) + + def writeFailedTests(tests: List[String]): Unit = + initLog() + tests.foreach(failed => failedTestsWriter.println(failed)) + failedTestsWriter.flush() } diff --git a/compiler/test/dotty/tools/dotc/reporting/UserDefinedErrorMessages.scala b/compiler/test/dotty/tools/dotc/reporting/UserDefinedErrorMessages.scala index 4d73b0d88b55..807d3a19f8f3 100644 --- a/compiler/test/dotty/tools/dotc/reporting/UserDefinedErrorMessages.scala +++ b/compiler/test/dotty/tools/dotc/reporting/UserDefinedErrorMessages.scala @@ -26,9 +26,9 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "Could not prove Int =!= Int") + assertEquals(m.message, "Could not prove Int =!= Int") } @Test def userDefinedImplicitAmbiguous2 = @@ -50,9 +50,9 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = 
messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "Could not prove Int =!= Int") + assertEquals(m.message, "Could not prove Int =!= Int") } @Test def userDefinedImplicitAmbiguous3 = @@ -75,9 +75,9 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "Could not prove Int =!= Int") + assertEquals(m.message, "Could not prove Int =!= Int") } @Test def userDefinedImplicitAmbiguous4 = @@ -97,9 +97,9 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "msg A=Any") + assertEquals(m.message, "msg A=Any") } @Test def userDefinedImplicitAmbiguous5 = @@ -119,8 +119,8 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "msg A=Any") + assertEquals(m.message, "msg A=Any") } } diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index eb6ab8e8fb5f..1e7d7ef2c708 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -20,7 +20,7 @@ class PatmatExhaustivityTest { val testsDir = "tests/patmat" // pagewidth/color: for a stable diff as the defaults are based on the terminal (e.g size) // stop-after: patmatexhaust-huge.scala crash compiler (but also hides other warnings..) 
- val options = List("-pagewidth", "80", "-color:never", "-Ystop-after:explicitSelf", "-classpath", TestConfiguration.basicClasspath) + val options = List("-pagewidth", "80", "-color:never", "-Ystop-after:explicitSelf", "-Ycheck-constraint-deps", "-classpath", TestConfiguration.basicClasspath) private def compile(files: List[JPath]): Seq[String] = { val opts = toolArgsFor(files).get(ToolName.Scalac).getOrElse(Nil) diff --git a/compiler/test/dotty/tools/dotc/transform/TypeTestsCastsTest.scala b/compiler/test/dotty/tools/dotc/transform/TypeTestsCastsTest.scala index 0db7a6072579..9f6f155a2ac2 100644 --- a/compiler/test/dotty/tools/dotc/transform/TypeTestsCastsTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/TypeTestsCastsTest.scala @@ -6,6 +6,8 @@ import core.* import Contexts.*, Decorators.*, Denotations.*, SymDenotations.*, Symbols.*, Types.* import Annotations.* +import dotty.tools.dotc.util.Spans.Span + import org.junit.Test import org.junit.Assert.* @@ -15,7 +17,7 @@ class TypeTestsCastsTest extends DottyTest: @Test def orL = checkFound(List(StringType, LongType), OrType(LongType, StringType, false)) @Test def orR = checkFound(List(LongType, StringType), OrType(StringType, LongType, false)) - @Test def annot = checkFound(List(StringType, LongType), AnnotatedType(OrType(LongType, StringType, false), Annotation(defn.UncheckedAnnot))) + @Test def annot = checkFound(List(StringType, LongType), AnnotatedType(OrType(LongType, StringType, false), Annotation(defn.UncheckedAnnot, Span(0)))) @Test def andL = checkFound(List(StringType), AndType(StringType, AnyType)) @Test def andR = checkFound(List(StringType), AndType(AnyType, StringType)) diff --git a/compiler/test/dotty/tools/dotc/transform/patmat/SpaceEngineTest.scala b/compiler/test/dotty/tools/dotc/transform/patmat/SpaceEngineTest.scala new file mode 100644 index 000000000000..c13ef0532348 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/transform/patmat/SpaceEngineTest.scala @@ -0,0 +1,64 @@ +package 
dotty.tools +package dotc +package transform +package patmat + +import core.*, Annotations.*, Contexts.*, Decorators.*, Flags.*, Names.*, StdNames.*, Symbols.*, Types.* +import ast.*, tpd.* + +import vulpix.TestConfiguration, TestConfiguration.basicClasspath + +import org.junit, junit.Test, junit.Assert.* + +class SpaceEngineTest: + import SpaceEngine.* + + @Test def isSubspaceTest1: Unit = inCompilerContext(basicClasspath) { + // Testing the property of `isSubspace` that: + // isSubspace(a, b) <=> simplify(simplify(a) - simplify(a)) == Empty + // Previously there were no simplify calls, + // and this is a counter-example, + // for which you need either to simplify(b) or simplify the minus result. + + val tp = defn.ConsType.appliedTo(defn.AnyType) + val unappTp = requiredMethod("scala.collection.immutable.::.unapply").termRef + val params = List(Empty, Typ(tp)) + + val a = Prod(tp, unappTp, params) + val b = Empty + + val res1 = isSubspace(a, b) + + val a2 = simplify(a) + val b2 = simplify(b) + val rem1 = minus(a2, b2) + val rem2 = simplify(rem1) + val res2 = rem2 == Empty + + assertEquals( + i"""|isSubspace: + | + |isSubspace(a, b) = $res1 + | + |Should be equivalent to: + |simplify(simplify(a) - simplify(b)) == Empty + |simplify(a2 - b2) == Empty + |simplify(rem1) == Empty + |rem2 == Empty + | + |a = ${show(a)} + |b = ${show(b)} + |a2 = ${show(a2)} + |b2 = ${show(b2)} + |rem1 = ${show(rem1)} + |rem2 = ${show(rem2)} + | + |a = ${a.toString} + |b = ${b.toString} + |a2 = ${a2.toString} + |b2 = ${b2.toString} + |rem1 = ${rem1.toString} + |rem2 = ${rem2.toString} + | + |""".stripMargin, res1, res2) + } diff --git a/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala new file mode 100644 index 000000000000..b08062913dac --- /dev/null +++ b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala @@ -0,0 +1,57 @@ +package dotty.tools +package dotc +package typer + +// Modelling the decision in 
IsFullyDefined +object InstantiateModel: + enum LB { case NN; case LL; case L1 }; import LB.* + enum UB { case AA; case UU; case U1 }; import UB.* + enum Var { case V; case NotV }; import Var.* + enum MSe { case M; case NotM }; import MSe.* + enum Bot { case Fail; case Ok; case Flip }; import Bot.* + enum Act { case Min; case Max; case ToMax; case Skip; case False }; import Act.* + + // NN/AA = Nothing/Any + // LL/UU = the original bounds, on the type parameter + // L1/U1 = the constrained bounds, on the type variable + // V = variance >= 0 ("non-contravariant") + // MSe = minimisedSelected + // Bot = IfBottom + // ToMax = delayed maximisation, via addition to toMaximize + // Skip = minimisedSelected "hold off instantiating" + // False = return false + + // there are 9 combinations: + // # | LB | UB | d | // d = direction + // --+----+----+---+ + // 1 | L1 | AA | - | L1 <: T + // 2 | L1 | UU | - | L1 <: T <: UU + // 3 | LL | U1 | + | LL <: T <: U1 + // 4 | NN | U1 | + | T <: U1 + // 5 | L1 | U1 | 0 | L1 <: T <: U1 + // 6 | LL | UU | 0 | LL <: T <: UU + // 7 | LL | AA | 0 | LL <: T + // 8 | NN | UU | 0 | T <: UU + // 9 | NN | AA | 0 | T + + def decide(lb: LB, ub: UB, v: Var, bot: Bot, m: MSe): Act = (lb, ub) match + case (L1, AA) => Min + case (L1, UU) => Min + case (LL, U1) => Max + case (NN, U1) => Max + + case (L1, U1) => if m==M || v==V then Min else ToMax + case (LL, UU) => if m==M || v==V then Min else ToMax + case (LL, AA) => if m==M || v==V then Min else ToMax + + case (NN, UU) => bot match + case _ if m==M => Max + //case Ok if v==V => Min // removed, i14218 fix + case Fail if v==V => False + case _ => ToMax + + case (NN, AA) => bot match + case _ if m==M => Skip + case Ok if v==V => Min + case Fail if v==V => False + case _ => ToMax diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index 866647476888..bcb08cd232d7 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala 
+++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -347,27 +347,6 @@ class ReplCompilerTests extends ReplTest: assertEquals("java.lang.AssertionError: assertion failed", all.head) } - @Test def i14491 = - initially { - run("import language.experimental.fewerBraces") - } andThen { - run("""|val x = Seq(7,8,9).apply: - | 1 - |""".stripMargin) - assertEquals("val x: Int = 8", storedOutput().trim) - } - initially { - run("""|import language.experimental.fewerBraces - |import language.experimental.fewerBraces as _ - |""".stripMargin) - } andThen { - run("""|val x = Seq(7,8,9).apply: - | 1 - |""".stripMargin) - assert("expected error if fewerBraces is unimported", - lines().exists(_.contains("missing arguments for method apply"))) - } - object ReplCompilerTests: private val pattern = Pattern.compile("\\r[\\n]?|\\n"); diff --git a/compiler/test/dotty/tools/repl/ScriptedTests.scala b/compiler/test/dotty/tools/repl/ScriptedTests.scala index 5c3a32cd40f8..dc809228e86b 100644 --- a/compiler/test/dotty/tools/repl/ScriptedTests.scala +++ b/compiler/test/dotty/tools/repl/ScriptedTests.scala @@ -3,12 +3,16 @@ package tools package repl import org.junit.Test +import org.junit.experimental.categories.Category /** Runs all tests contained in `compiler/test-resources/repl/` */ class ScriptedTests extends ReplTest { @Test def replTests = scripts("/repl").foreach(testFile) + @Category(Array(classOf[BootstrappedOnlyTests])) + @Test def replMacrosTests = scripts("/repl-macros").foreach(testFile) + @Test def typePrinterTests = scripts("/type-printer").foreach(testFile) } diff --git a/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala b/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala index 5a96976bd867..7272c10aa003 100644 --- a/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala +++ b/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala @@ -32,6 +32,20 @@ class ShadowingBatchTests extends ErrorMessagesTest: ictx.setSetting(classpath, classpath.value + 
File.pathSeparator + dir.jpath.toAbsolutePath) } + @Test def io = + val lib = """|package io.foo + | + |object Bar { + | def baz: Int = 42 + |} + |""".stripMargin + val app = """|object Main: + | def main(args: Array[String]): Unit = + | println(io.foo.Bar.baz) + |""".stripMargin + checkMessages(lib).expectNoErrors + checkMessages(app).expectNoErrors + @Test def file = checkMessages("class C(val c: Int)").expectNoErrors checkMessages("object rsline1 {\n def line1 = new C().c\n}").expect { (_, msgs) => diff --git a/compiler/test/dotty/tools/repl/ShadowingTests.scala b/compiler/test/dotty/tools/repl/ShadowingTests.scala index 1ba58a4babff..98aa58a62a15 100644 --- a/compiler/test/dotty/tools/repl/ShadowingTests.scala +++ b/compiler/test/dotty/tools/repl/ShadowingTests.scala @@ -76,11 +76,23 @@ class ShadowingTests extends ReplTest(options = ShadowingTests.options): Files.delete(file) end compileShadowed + @Test def io = shadowedScriptedTest(name = "io", + shadowed = """|package io.foo + | + |object Bar { + | def baz: Int = 42 + |} + |""".stripMargin, + script = """|scala> io.foo.Bar.baz + |val res0: Int = 42 + |""".stripMargin + ) + @Test def i7635 = shadowedScriptedTest(name = "", shadowed = "class C(val c: Int)", script = """|scala> new C().c - |-- Error: ---------------------------------------------------------------------- + |-- [E171] Type Error: ---------------------------------------------------------- |1 | new C().c | | ^^^^^^^ | | missing argument for parameter c of constructor C in class C: (c: Int): C @@ -122,13 +134,18 @@ class ShadowingTests extends ReplTest(options = ShadowingTests.options): |val y: String = foo | |scala> if (true) x else y - |val res0: Matchable = 42 + |val res0: Int | String = 42 |""".stripMargin.linesIterator.toList ) ShadowingTests.createSubDir("util") testScript(name = "", """|scala> import util.Try + |-- [E008] Not Found Error: ----------------------------------------------------- + |1 | import util.Try + | | ^^^ + | | value Try is 
not a member of util + |1 error found | |scala> object util { class Try { override def toString = "you've gotta try!" } } |// defined object util diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index ecb01d6863da..910584a9b5e7 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -228,4 +228,13 @@ class TabcompleteTests extends ReplTest { tabComplete(":d") ) } + + @Test def `i16551 typer phase for implicits` = initially { + val comp = tabComplete("BigInt(1).") + assertTrue(comp.distinct.nonEmpty) + } + + @Test def i9334 = initially { + assert(tabComplete("class Foo[T]; classOf[Foo].").contains("getName")) + } } diff --git a/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala b/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala new file mode 100644 index 000000000000..c7172f54aadc --- /dev/null +++ b/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala @@ -0,0 +1,3 @@ +package dotty.tools.vulpix + +case class FailedTestInfo(title: String, extra: String) diff --git a/compiler/test/dotty/tools/vulpix/FileDiff.scala b/compiler/test/dotty/tools/vulpix/FileDiff.scala index c060c4d3938c..5e882be6425a 100644 --- a/compiler/test/dotty/tools/vulpix/FileDiff.scala +++ b/compiler/test/dotty/tools/vulpix/FileDiff.scala @@ -50,21 +50,6 @@ object FileDiff { outFile.writeAll(content.mkString("", EOL, EOL)) } - def checkAndDump(sourceTitle: String, actualLines: Seq[String], checkFilePath: String): Boolean = { - val outFilePath = checkFilePath + ".out" - FileDiff.check(sourceTitle, actualLines, checkFilePath) match { - case Some(msg) => - FileDiff.dump(outFilePath, actualLines) - println(msg) - println(FileDiff.diffMessage(checkFilePath, outFilePath)) - false - case _ => - val jOutFilePath = Paths.get(outFilePath) - Files.deleteIfExists(jOutFilePath) - true - } - } - def checkAndDumpOrUpdate(sourceTitle: String, actualLines: 
Seq[String], checkFilePath: String): Boolean = { val outFilePath = checkFilePath + ".out" FileDiff.check(sourceTitle, actualLines, checkFilePath) match { diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index fb60d98ea5cf..3799a2335a78 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -57,6 +57,9 @@ trait ParallelTesting extends RunnerOrchestration { self => /** Tests should override the checkfiles with the current output */ def updateCheckFiles: Boolean + /** Contains a list of failed tests to run, if list is empty no tests will run */ + def failedTests: Option[List[String]] + /** A test source whose files or directory of files is to be compiled * in a specific way defined by the `Test` */ @@ -204,6 +207,14 @@ trait ParallelTesting extends RunnerOrchestration { self => protected def shouldSkipTestSource(testSource: TestSource): Boolean = false + protected def shouldReRun(testSource: TestSource): Boolean = + failedTests.forall(rerun => testSource match { + case JointCompilationSource(_, files, _, _, _, _) => + rerun.exists(filter => files.exists(file => file.getPath.contains(filter))) + case SeparateCompilationSource(_, dir, _, _) => + rerun.exists(dir.getPath.contains) + }) + private trait CompilationLogic { this: Test => def suppressErrors = false @@ -281,10 +292,12 @@ trait ParallelTesting extends RunnerOrchestration { self => private final def onComplete(testSource: TestSource, reportersOrCrash: Try[Seq[TestReporter]], logger: LoggedRunnable): Unit = reportersOrCrash match { case TryFailure(exn) => onFailure(testSource, Nil, logger, Some(s"Fatal compiler crash when compiling: ${testSource.title}:\n${exn.getMessage}${exn.getStackTrace.map("\n\tat " + _).mkString}")) - case TrySuccess(reporters) => maybeFailureMessage(testSource, reporters) match { - case Some(msg) => onFailure(testSource, reporters, logger, 
Option(msg).filter(_.nonEmpty)) - case None => onSuccess(testSource, reporters, logger) - } + case TrySuccess(reporters) if !reporters.exists(_.skipped) => + maybeFailureMessage(testSource, reporters) match { + case Some(msg) => onFailure(testSource, reporters, logger, Option(msg).filter(_.nonEmpty)) + case None => onSuccess(testSource, reporters, logger) + } + case _ => } /** @@ -357,7 +370,7 @@ trait ParallelTesting extends RunnerOrchestration { self => case SeparateCompilationSource(_, dir, _, _) => testFilter.exists(dir.getPath.contains) } - filteredByName.filterNot(shouldSkipTestSource(_)) + filteredByName.filterNot(shouldSkipTestSource(_)).filter(shouldReRun(_)) /** Total amount of test sources being compiled by this test */ val sourceCount = filteredSources.length @@ -391,6 +404,10 @@ trait ParallelTesting extends RunnerOrchestration { self => /** Number of failed tests */ def failureCount: Int = _failureCount + private var _skipCount = 0 + protected final def registerSkip(): Unit = synchronized { _skipCount += 1 } + def skipCount: Int = _skipCount + protected def logBuildInstructions(testSource: TestSource, reporters: Seq[TestReporter]) = { val (errCount, warnCount) = countErrorsAndWarnings(reporters) val errorMsg = testSource.buildInstructions(errCount, warnCount) @@ -403,14 +420,14 @@ trait ParallelTesting extends RunnerOrchestration { self => synchronized { reproduceInstructions.append(ins) } /** The test sources that failed according to the implementing subclass */ - private val failedTestSources = mutable.ArrayBuffer.empty[String] + private val failedTestSources = mutable.ArrayBuffer.empty[FailedTestInfo] protected final def failTestSource(testSource: TestSource, reason: Failure = Generic) = synchronized { val extra = reason match { case TimeoutFailure(title) => s", test '$title' timed out" case JavaCompilationFailure(msg) => s", java test sources failed to compile with: \n$msg" case Generic => "" } - failedTestSources.append(testSource.title + s" 
failed" + extra) + failedTestSources.append(FailedTestInfo(testSource.title, s" failed" + extra)) fail(reason) } @@ -464,13 +481,13 @@ trait ParallelTesting extends RunnerOrchestration { self => val toolArgs = toolArgsFor(files.toList.map(_.toPath), getCharsetFromEncodingOpt(flags0)) val spec = raw"(\d+)(\+)?".r - val testFilter = toolArgs.get(ToolName.Test) match + val testIsFiltered = toolArgs.get(ToolName.Test) match case Some("-jvm" :: spec(n, more) :: Nil) => if more == "+" then isJavaAtLeast(n) else javaSpecVersion == n case Some(args) => throw new IllegalStateException(args.mkString("unknown test option: ", ", ", "")) case None => true - def scalacOptions = toolArgs.get(ToolName.Scalac).getOrElse(Nil) + def scalacOptions = toolArgs.getOrElse(ToolName.Scalac, Nil) val flags = flags0 .and(scalacOptions: _*) @@ -509,7 +526,7 @@ trait ParallelTesting extends RunnerOrchestration { self => val allArgs = flags.all - if testFilter then + if testIsFiltered then // If a test contains a Java file that cannot be parsed by Dotty's Java source parser, its // name must contain the string "JAVA_ONLY". 
val dottyFiles = files.filterNot(_.getName.contains("JAVA_ONLY")).map(_.getPath) @@ -523,6 +540,9 @@ trait ParallelTesting extends RunnerOrchestration { self => echo(s"\njava compilation failed: \n${ javaErrors.get }") fail(failure = JavaCompilationFailure(javaErrors.get)) } + else + registerSkip() + reporter.setSkip() end if reporter @@ -541,7 +561,7 @@ trait ParallelTesting extends RunnerOrchestration { self => def addToLast(str: String): Unit = diagnostics match case head :: tail => - diagnostics = Diagnostic.Error(s"${head.msg.rawMessage}$str", head.pos) :: tail + diagnostics = Diagnostic.Error(s"${head.msg.message}$str", head.pos) :: tail case Nil => var inError = false for line <- errorsText.linesIterator do @@ -724,15 +744,18 @@ trait ParallelTesting extends RunnerOrchestration { self => } private def verifyOutput(checkFile: Option[JFile], dir: JFile, testSource: TestSource, warnings: Int, reporters: Seq[TestReporter], logger: LoggedRunnable) = { - if (Properties.testsNoRun) addNoRunWarning() + if Properties.testsNoRun then addNoRunWarning() else runMain(testSource.runClassPath, testSource.allToolArgs) match { case Success(output) => checkFile match { case Some(file) if file.exists => diffTest(testSource, file, output.linesIterator.toList, reporters, logger) case _ => } case Failure(output) => - echo(s"Test '${testSource.title}' failed with output:") - echo(output) + if output == "" then + echo(s"Test '${testSource.title}' failed with no output") + else + echo(s"Test '${testSource.title}' failed with output:") + echo(output) failTestSource(testSource) case Timeout => echo("failed because test " + testSource.title + " timed out") @@ -748,7 +771,7 @@ trait ParallelTesting extends RunnerOrchestration { self => extends Test(testSources, times, threadLimit, suppressAllOutput) { override def suppressErrors = true - override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = { + override def 
maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = def compilerCrashed = reporters.exists(_.compilerCrashed) lazy val (errorMap, expectedErrors) = getErrorMapAndExpectedCount(testSource.sourceFiles.toIndexedSeq) lazy val actualErrors = reporters.foldLeft(0)(_ + _.errorCount) @@ -772,7 +795,7 @@ trait ParallelTesting extends RunnerOrchestration { self => else if !errorMap.isEmpty then s"\nExpected error(s) have {=}: $errorMap" else null } - } + end maybeFailureMessage override def onSuccess(testSource: TestSource, reporters: Seq[TestReporter], logger: LoggedRunnable): Unit = checkFile(testSource).foreach(diffTest(testSource, _, reporterOutputLines(reporters), reporters, logger)) @@ -780,12 +803,13 @@ trait ParallelTesting extends RunnerOrchestration { self => def reporterOutputLines(reporters: Seq[TestReporter]): List[String] = reporters.flatMap(_.consoleOutput.split("\n")).toList - // In neg-tests we allow two types of error annotations, - // "nopos-error" which doesn't care about position and "error" which - // has to be annotated on the correct line number. + // In neg-tests we allow two or three types of error annotations. + // Normally, `// error` must be annotated on the correct line number. + // `// nopos-error` allows for an error reported without a position. + // `// anypos-error` allows for an error reported with a position that can't be annotated in the check file. 
// // We collect these in a map `"file:row" -> numberOfErrors`, for - // nopos errors we save them in `"file" -> numberOfNoPosErrors` + // nopos and anypos errors we save them in `"file" -> numberOfNoPosErrors` def getErrorMapAndExpectedCount(files: Seq[JFile]): (HashMap[String, Integer], Int) = val comment = raw"//( *)(nopos-|anypos-)?error".r val errorMap = new HashMap[String, Integer]() @@ -950,8 +974,7 @@ trait ParallelTesting extends RunnerOrchestration { self => * =============== * Since this is a parallel test suite, it is essential to be able to * compose tests to take advantage of the concurrency. This is done using - * the `+` function. This function will make sure that tests being combined - * are compatible according to the `require`s in `+`. + * `aggregateTests` in the companion, which will ensure that aggregation is allowed. */ final class CompilationTest private ( private[ParallelTesting] val targets: List[TestSource], @@ -969,6 +992,14 @@ trait ParallelTesting extends RunnerOrchestration { self => def this(targets: List[TestSource]) = this(targets, 1, true, None, false, false) + def copy(targets: List[TestSource], + times: Int = times, + shouldDelete: Boolean = shouldDelete, + threadLimit: Option[Int] = threadLimit, + shouldFail: Boolean = shouldFail, + shouldSuppressOutput: Boolean = shouldSuppressOutput): CompilationTest = + CompilationTest(targets, times, shouldDelete, threadLimit, shouldFail, shouldSuppressOutput) + /** Creates a "pos" test run, which makes sure that all tests pass * compilation without generating errors and that they do not crash the * compiler @@ -981,7 +1012,7 @@ trait ParallelTesting extends RunnerOrchestration { self => if (!shouldFail && test.didFail) { fail(s"Expected no errors when compiling, failed for the following reason(s):\n${reasonsForFailure(test)}\n") } - else if (shouldFail && !test.didFail) { + else if (shouldFail && !test.didFail && test.skipCount == 0) { fail("Pos test should have failed, but didn't") } @@ 
-989,23 +1020,21 @@ trait ParallelTesting extends RunnerOrchestration { self => } /** Creates a "neg" test run, which makes sure that each test generates the - * correct amount of errors at the correct positions. It also makes sure - * that none of these tests crash the compiler + * correct number of errors at the correct positions. It also makes sure + * that none of these tests crashes the compiler. */ - def checkExpectedErrors()(implicit summaryReport: SummaryReporting): this.type = { + def checkExpectedErrors()(implicit summaryReport: SummaryReporting): this.type = val test = new NegTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() cleanup() - if (shouldFail && !test.didFail) { + if shouldFail && !test.didFail && test.skipCount == 0 then fail(s"Neg test shouldn't have failed, but did. Reasons:\n${ reasonsForFailure(test) }") - } - else if (!shouldFail && test.didFail) { + else if !shouldFail && test.didFail then fail("Neg test should have failed, but did not") - } this - } + end checkExpectedErrors /** Creates a "fuzzy" test run, which makes sure that each test compiles (or not) without crashing */ def checkNoCrash()(implicit summaryReport: SummaryReporting): this.type = { @@ -1030,12 +1059,10 @@ trait ParallelTesting extends RunnerOrchestration { self => cleanup() - if (!shouldFail && test.didFail) { + if !shouldFail && test.didFail then fail(s"Run test failed, but should not, reasons:\n${ reasonsForFailure(test) }") - } - else if (shouldFail && !test.didFail) { + else if shouldFail && !test.didFail && test.skipCount == 0 then fail("Run test should have failed, but did not") - } this } @@ -1160,35 +1187,32 @@ trait ParallelTesting extends RunnerOrchestration { self => } } - object CompilationTest { + object CompilationTest: /** Compose test targets from `tests` - * - * It does this, only if the two tests are compatible. Otherwise it throws - * an `IllegalArgumentException`. 
- * - * Grouping tests together like this allows us to take advantage of the - * concurrency offered by this test suite as each call to an executing - * method (`pos()` / `checkExpectedErrors()`/ `run()`) will spin up a thread pool with the - * maximum allowed level of concurrency. Doing this for only a few targets - * does not yield any real benefit over sequential compilation. - * - * As such, each `CompilationTest` should contain as many targets as - * possible. - */ - def aggregateTests(tests: CompilationTest*): CompilationTest = { + * + * It does this, only if all the tests are mutally compatible. + * Otherwise it throws an `IllegalArgumentException`. + * + * Grouping tests together like this allows us to take advantage of the + * concurrency offered by this test suite, as each call to an executing + * method (`pos()` / `checkExpectedErrors()`/ `run()`) will spin up a thread pool with the + * maximum allowed level of concurrency. Doing this for only a few targets + * does not yield any real benefit over sequential compilation. + * + * As such, each `CompilationTest` should contain as many targets as + * possible. 
+ */ + def aggregateTests(tests: CompilationTest*): CompilationTest = assert(tests.nonEmpty) - def aggregate(test1: CompilationTest, test2: CompilationTest) = { + def aggregate(test1: CompilationTest, test2: CompilationTest) = require(test1.times == test2.times, "can't combine tests that are meant to be benchmark compiled") require(test1.shouldDelete == test2.shouldDelete, "can't combine tests that differ on deleting output") require(test1.shouldFail == test2.shouldFail, "can't combine tests that have different expectations on outcome") require(test1.shouldSuppressOutput == test2.shouldSuppressOutput, "can't combine tests that both suppress and don't suppress output") - new CompilationTest(test1.targets ++ test2.targets, test1.times, test1.shouldDelete, test1.threadLimit, test1.shouldFail, test1.shouldSuppressOutput) - } + test1.copy(test1.targets ++ test2.targets) // what if thread limit differs? currently threads are limited on aggregate only tests.reduce(aggregate) - } - - } + end CompilationTest /** Create out directory for directory `d` */ def createOutputDirsForDir(d: JFile, sourceDir: JFile, outDir: String): JFile = { diff --git a/compiler/test/dotty/tools/vulpix/SummaryReport.scala b/compiler/test/dotty/tools/vulpix/SummaryReport.scala index e216ac1c5d4f..74612387015f 100644 --- a/compiler/test/dotty/tools/vulpix/SummaryReport.scala +++ b/compiler/test/dotty/tools/vulpix/SummaryReport.scala @@ -3,7 +3,6 @@ package tools package vulpix import scala.language.unsafeNulls - import scala.collection.mutable import dotc.reporting.TestReporter @@ -23,7 +22,7 @@ trait SummaryReporting { def reportPassed(): Unit /** Add the name of the failed test */ - def addFailedTest(msg: String): Unit + def addFailedTest(msg: FailedTestInfo): Unit /** Add instructions to reproduce the error */ def addReproduceInstruction(instr: String): Unit @@ -49,7 +48,7 @@ trait SummaryReporting { final class NoSummaryReport extends SummaryReporting { def reportFailed(): Unit = () def 
reportPassed(): Unit = () - def addFailedTest(msg: String): Unit = () + def addFailedTest(msg: FailedTestInfo): Unit = () def addReproduceInstruction(instr: String): Unit = () def addStartingMessage(msg: String): Unit = () def addCleanup(f: () => Unit): Unit = () @@ -66,7 +65,7 @@ final class SummaryReport extends SummaryReporting { import scala.jdk.CollectionConverters._ private val startingMessages = new java.util.concurrent.ConcurrentLinkedDeque[String] - private val failedTests = new java.util.concurrent.ConcurrentLinkedDeque[String] + private val failedTests = new java.util.concurrent.ConcurrentLinkedDeque[FailedTestInfo] private val reproduceInstructions = new java.util.concurrent.ConcurrentLinkedDeque[String] private val cleanUps = new java.util.concurrent.ConcurrentLinkedDeque[() => Unit] @@ -79,7 +78,7 @@ final class SummaryReport extends SummaryReporting { def reportPassed(): Unit = passed += 1 - def addFailedTest(msg: String): Unit = + def addFailedTest(msg: FailedTestInfo): Unit = failedTests.add(msg) def addReproduceInstruction(instr: String): Unit = @@ -108,7 +107,8 @@ final class SummaryReport extends SummaryReporting { startingMessages.asScala.foreach(rep.append) - failedTests.asScala.map(x => s" $x\n").foreach(rep.append) + failedTests.asScala.map(x => s" ${x.title}${x.extra}\n").foreach(rep.append) + TestReporter.writeFailedTests(failedTests.asScala.toList.map(_.title)) // If we're compiling locally, we don't need instructions on how to // reproduce failures diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index 3ea364cc3a68..5d2992b50a09 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -49,6 +49,7 @@ object TestConfiguration { withCompilerClasspath + File.pathSeparator + mkClasspath(List(Properties.dottyTastyInspector)) lazy val scalaJSClasspath = mkClasspath(List( + 
Properties.scalaJSJavalib, Properties.scalaJSLibrary, Properties.dottyLibraryJS )) diff --git a/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala b/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala index 75af0aa94893..0044ab8a94e5 100644 --- a/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala +++ b/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala @@ -30,6 +30,7 @@ object VulpixMetaTests extends ParallelTesting { def isInteractive = false // Don't beautify output for interactive use. def testFilter = Nil // Run all the tests. def updateCheckFiles: Boolean = false + def failedTests = None @AfterClass def tearDown() = this.cleanup() diff --git a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala index 8a32fd636e76..baf61c845d96 100644 --- a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala +++ b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala @@ -108,6 +108,7 @@ object VulpixUnitTests extends ParallelTesting { def isInteractive = !sys.env.contains("DRONE") def testFilter = Nil def updateCheckFiles: Boolean = false + def failedTests = None @AfterClass def tearDown() = this.cleanup() diff --git a/compiler/test/worksheets/baseTypetest.sc b/compiler/test/worksheets/baseTypetest.sc index 001f1e3b3eaa..4dbd68a6fdc7 100644 --- a/compiler/test/worksheets/baseTypetest.sc +++ b/compiler/test/worksheets/baseTypetest.sc @@ -22,5 +22,5 @@ object baseTypetest extends DottyTest { defn.StringClass isSubClass defn.NullClass //> res4: Boolean = false defn.StringClass.typeRef.baseType(defn.NullClass) //> res5: dotty.tools.dotc.core.Types.Type = NoType - + } \ No newline at end of file diff --git a/compiler/test/worksheets/denotTest.sc b/compiler/test/worksheets/denotTest.sc index 222a347b6947..aa3fb383bd6f 100644 --- a/compiler/test/worksheets/denotTest.sc +++ b/compiler/test/worksheets/denotTest.sc @@ -7,7 +7,7 @@ import Types._, Symbols._ object denotTest extends DottyTest { println("Welcome 
to the Scala worksheet") //> Welcome to the Scala worksheet - + val str = defn.StringClass.typeRef //> str : dotty.tools.dotc.core.Types.TypeRef = TypeRef(ThisType(module class l //| ang#57),String) val d= str.member("getBytes".toTermName) //> d : dotty.tools.dotc.core.Denotations.Denotation = val getBytes val g @@ -47,7 +47,7 @@ object denotTest extends DottyTest { //| a#35),Array), scala$Array$$T, TypeAlias(TypeRef(ThisType(module class scala# //| 35),Char))), TypeRef(ThisType(module class scala#35),Int), TypeRef(ThisType( //| module class scala#35),Int)), TypeRef(ThisType(module class lang#57),String) - //| ), JavaMethodType(List(x$0), List(RefinedType(TypeRef(ThisType(module class + //| ), JavaMethodType(List(x$0), List(RefinedType(TypeRef(ThisType(module class //| scala#35),Array), scala$Array$$T, TypeAlias(TypeRef(ThisType(module class sc //| ala#35),Char)))), TypeRef(ThisType(module class lang#57),String)), JavaMetho //| dType(List(x$0), List(TypeRef(ThisType(module class scala#35),Any)), TypeRef diff --git a/compiler/test/worksheets/nesting.sc b/compiler/test/worksheets/nesting.sc index a6fc924320a0..bb3e9a71146e 100644 --- a/compiler/test/worksheets/nesting.sc +++ b/compiler/test/worksheets/nesting.sc @@ -2,7 +2,7 @@ package dotty.tools.dotc.core object nesting { class C { - + class D { private def x = "D" def show = x @@ -10,7 +10,7 @@ object nesting { println(x) } } - + val foo: D = { class D extends C.this.D { private def x = "foo.D" @@ -21,11 +21,11 @@ object nesting { new D } } - + val c = new C //> c : dotty.tools.dotc.core.nesting.C = dotty.tools.dotc.core.nesting$C@1a84d //| a23 val d = c.foo //> d : dotty.tools.dotc.core.nesting.c.D = dotty.tools.dotc.core.nesting$C$D$1 //| @2705d88a d.show //> res0: String = foo.D - + } \ No newline at end of file diff --git a/compiler/test/worksheets/periodtest.sc b/compiler/test/worksheets/periodtest.sc index 09c02da19a10..68a7cc43b20e 100644 --- a/compiler/test/worksheets/periodtest.sc +++ 
b/compiler/test/worksheets/periodtest.sc @@ -2,9 +2,9 @@ package dotty.tools.dotc.core object periodtest { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + import Periods._ - + val p1 = Period(1, 2, 7) //> p1 : dotty.tools.dotc.core.Periods.Period = Period(2..7, run = 1) val p2 = Period(1, 3, 7) //> p2 : dotty.tools.dotc.core.Periods.Period = Period(3..7, run = 1) p1 contains p2 //> res0: Boolean = true diff --git a/compiler/test/worksheets/positiontest.sc b/compiler/test/worksheets/positiontest.sc index 11cc54dbeab9..b152368145f1 100644 --- a/compiler/test/worksheets/positiontest.sc +++ b/compiler/test/worksheets/positiontest.sc @@ -5,7 +5,7 @@ import Positions._ object positiontest { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + val p = Position(0, 1, 0) //> p : dotty.tools.dotc.util.Positions.Position = [0..1] val p2 = Position(0, 2) //> p2 : dotty.tools.dotc.util.Positions.Position = [0..2] val p3 = Position(1, 0) //> p3 : dotty.tools.dotc.util.Positions.Position = [no position] diff --git a/compiler/test/worksheets/testnames.sc b/compiler/test/worksheets/testnames.sc index 282b07d4edb7..8f042b7036fd 100644 --- a/compiler/test/worksheets/testnames.sc +++ b/compiler/test/worksheets/testnames.sc @@ -2,7 +2,7 @@ package dotty.tools.dotc.core object testnames { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + import Names._ val n = termName("hello") //> n : dotty.tools.dotc.core.Names.TermName = hello val tn = n.toTypeName //> tn : dotty.tools.dotc.core.Names.TypeName = hello @@ -10,7 +10,7 @@ object testnames { assert(tn.toTermName eq n) assert(tn.toLocalName eq ln) assert(n.toLocalName eq ln) - + n == tn //> res0: Boolean = false n == ln //> res1: Boolean = false n eq tn //> res2: Boolean = false @@ -19,7 +19,7 @@ object testnames { val foo = encodedTermName("++") //> foo : dotty.tools.dotc.core.Names.TermName = $plus$plus foo.hashCode //> res5: Int = 5 
foo.toTypeName.hashCode //> res6: Int = -5 - + val nfoo = n ++ foo //> nfoo : dotty.tools.dotc.core.testnames.n.ThisName = hello$plus$plus nfoo contains '$' //> res7: Boolean = true nfoo.replace('$', '.') //> res8: dotty.tools.dotc.core.testnames.nfoo.ThisName = hello.plus.plus @@ -36,7 +36,7 @@ object testnames { termName("abc") //> res18: dotty.tools.dotc.core.Names.TermName = abc nfoo.filter(_ >= 'l') //> res19: dotty.tools.dotc.core.Names.Name = lloplusplus nfoo map (_.toUpper) //> res20: dotty.tools.dotc.core.Names.Name = HELLO$PLUS$PLUS - + import Decorators._ val local = "local".toTermName.toLocalName //> local : dotty.tools.dotc.core.Names.LocalName = local diff --git a/docs/_assets/css/color-brewer.css b/docs/_assets/css/color-brewer.css deleted file mode 100644 index b832a05ebc51..000000000000 --- a/docs/_assets/css/color-brewer.css +++ /dev/null @@ -1,66 +0,0 @@ -/* - -Colorbrewer theme -Original: https://github.com/mbostock/colorbrewer-theme (c) Mike Bostock -Ported by Fabrício Tavares de Oliveira - -*/ - -/* .hljs { - background: transparent; -} - -.hljs, -.hljs-subst { - color: #000; -} */ - -/*.hljs-string, -.hljs-meta, -.hljs-symbol, -.hljs-template-tag, -.hljs-template-variable, -.hljs-addition { - color: #756bb1; -}*/ - -/* .hljs-comment, -.hljs-quote { - color: #636363; -} - -.hljs-number, -.hljs-regexp, -.hljs-literal, -.hljs-bullet, -.hljs-link { - color: #31a354; -} - -.hljs-deletion, -.hljs-variable { - color: #88f; -} */ - -/*.hljs-keyword, -.hljs-selector-tag, -.hljs-title, -.hljs-section, -.hljs-built_in, -.hljs-doctag, -.hljs-type, -.hljs-tag, -.hljs-name, -.hljs-selector-id, -.hljs-selector-class, -.hljs-strong { - color: #3182bd; -}*/ - -/* .hljs-emphasis { - font-style: italic; -} - -.hljs-attribute { - color: #e6550d; -} */ diff --git a/docs/_assets/css/frontpage.css b/docs/_assets/css/frontpage.css index a3a5c0d7dd8a..d0894fbb5052 100644 --- a/docs/_assets/css/frontpage.css +++ b/docs/_assets/css/frontpage.css @@ -28,6 +28,7 
@@ h1#main { /* navigation */ header { font-size: 24px; + margin-block-end: calc(2* var(--base-spacing)); } header .nav-item i { diff --git a/docs/_assets/docsScalaLangResources/scaladoc-assets.html b/docs/_assets/docsScalaLangResources/scaladoc-assets.html deleted file mode 100644 index 504a93b25fa8..000000000000 --- a/docs/_assets/docsScalaLangResources/scaladoc-assets.html +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/docs/_docs/contributing/procedures/index.md b/docs/_docs/contributing/procedures/index.md new file mode 100644 index 000000000000..01c76f72c00c --- /dev/null +++ b/docs/_docs/contributing/procedures/index.md @@ -0,0 +1,4 @@ +--- +layout: index +title: Procedures +--- diff --git a/docs/_docs/contributing/tools/index.md b/docs/_docs/contributing/tools/index.md new file mode 100644 index 000000000000..92503ee82013 --- /dev/null +++ b/docs/_docs/contributing/tools/index.md @@ -0,0 +1,4 @@ +--- +layout: index +title: IDEs and Tools +--- diff --git a/docs/_docs/internals/backend.md b/docs/_docs/internals/backend.md index e3215c3993ae..660f6e1f41e5 100644 --- a/docs/_docs/internals/backend.md +++ b/docs/_docs/internals/backend.md @@ -6,8 +6,13 @@ title: "Backend Internals" The code for the JVM backend is split up by functionality and assembled in `GenBCode.scala`. This file defines class `GenBCode`, the compiler phase. +The workflow is split into `CodeGen.scala` Scala compilation context aware responsible for emitting bytecode, +and `PostProcessor.scala` which can be used for parallelized, context agnostic processing. In Scala 2 `PostProcessor`, +was responsible for performing bytecode optimization, e.g. inlining method calls. In Scala 3 it is only used for writing +Class files and Tasty to disk. 
+ ``` -class GenBCodePipeline -[defines]--> PlainClassBuilder +class CodeGen.Impl -[defines]--> PlainClassBuilder | | [extends] [extends] | | @@ -18,14 +23,14 @@ BCodeBodyBuilder ----------------> PlainBodyBuilder BCodeSkelBuilder ----------------> PlainSkelBuilder | / | \ BCodeHelpers ----------------> BCClassGen BCAnnotGen ... (more components) - | | \ - | | \-------------> helper methods - | | \------------> JMirrorBuilder, JBeanInfoBuilder (uses some components, e.g. BCInnerClassGen) - | | - | BytecodeWriters ---------> methods and classes to write byte code files + | \ + | \-------------> helper methods + | \------------> JMirrorBuilder, JAndroidBuilder (uses some components, e.g. BCInnerClassGen) + | \-----------> `backendUtils`: utility for bytecode related ops, contains mapping for supported classfile version | BCodeIdiomatic ----------------> utilities for code generation, e.g. genPrimitiveArithmetic \--------------> `bTypes`: maps and fields for common BTypes + \-------------> `int`: synchronized interface between PostProcessor and compiltion ctx ``` The `BTypes.scala` class contains the `BType` class and predefined BTypes @@ -34,28 +39,33 @@ The `BTypes.scala` class contains the `BType` class and predefined BTypes Compiler creates a `GenBCode` `Phase`, calls `runOn(compilationUnits)`, which calls `run(context)`. This: -* initializes `myPrimitives` defined in `DottyPrimitives` (maps primitive - members, like `int.+`, to bytecode instructions) -* creates a `GenBCodePipeline` and calls `run(tree)` - -`GenBCodePipeline` now: - -* initializes the `bTypes` field of `GenBCodePipeline` defined in `BCodeIdiomatic` - (BType maps, common BTypes like `StringRef`) -* creates `BytecodeWriter` and `JMirrorBuilder` instances (on each compiler run) -* `buildAndSendToDisk(units)`: uses work queues, see below. - - `GenBCodePipeline.feedPipeline1` adds ClassDefs to `q1` - - `Worker1.run` creates ASM `ClassNodes`, adds to `q2`. 
It creates one - `PlainClassBuilder` for each compilation unit. - - `Worker2.run` adds byte arrays (one for each class) to `q3` - - `GenBCodePipeline.drainQ3` writes byte arrays to disk +* initializes lazily components reused by all `compilationUnits` using same instance of Context: + - `bTypes`, used by `CodeGen` and `PostProcessro`, defined in `BCodeIdiomatic` (BType maps, common BTypes like `StringRef`) + - `backendInterface:` - proxy to Context specific operations + - `codeGen: CodeGen` - uses `backendInterface`, `bTypes`, initializes instance of `DottyPrimitives` and defines `JMirrorBuilder` instance and implements bytecode generation flow (maps primitive members, like `int.+`, to bytecode instructions) + - `fontendAccess` - synchronized `PostProcessor` interface to compiler settings, reporting and GenBCode context (e.g. list of entrypoints) + - `postProcessor` - compilation context agnostic module dedicated to parallel processing of produced bytecode. Currently used only for writing Tasty and Class files. 
Defines `backendUtils` and `classfileWriter` +* sets context of current compilation unit to the shared context instance +* calls `codeGen.genUnit(ctx.compilation)` which returns structure with generated definitions (both Class files and Tasty) +* calls postProcessing of generated definition in `postProcessor` +* calls registered callbacks if needed for every generated class + +Upon calling `codeGen.genUnit` it: +* creates `PlainClassBuilder` instance for each generated `TypeDef` and creates ASM `ClassNode` +* creates optional mirror class if needed +* generates Tasty file content and store its attributes in either mirror or plain class node + +`PostProcessor` is later: +* enriching `ClassNode` with collected serializable lambdas +* sets its inner classes +* serializes class and writes it to file, optionally it can execute register callbacks for each generated file +* writes generated Tasty to file ## Architecture ## The architecture of `GenBCode` is the same as in Scalac. It can be partitioned into weakly coupled components (called "subsystems" below): - ### (a) The queue subsystem ### Queues mediate between processors, queues don't know what each processor does. @@ -126,4 +136,4 @@ emitting: ### (f) Building an ASM ClassNode given an AST TypeDef ### -It's done by `PlainClassBuilder`(see `GenBCode.scala`). +It's done by `PlainClassBuilder`(see `CodeGen.scala`). diff --git a/docs/_docs/internals/gadts.md b/docs/_docs/internals/gadts.md index 777b9dd32e39..58f511c946c3 100644 --- a/docs/_docs/internals/gadts.md +++ b/docs/_docs/internals/gadts.md @@ -1,4 +1,9 @@ -# GADTs - Broad overview +--- +layout: doc-page +title: "GADTs - Broad overview" +--- + +## Introduction There are multiple levels to the implementation. They deal with slightly different problems. The most important levels are the following ones: @@ -18,9 +23,9 @@ There are also other parts to supporting GADTs. Roughly in order of importance, 1. Attachment key is named `inferredGadtConstraints`. 4. 
When we select members on a type that may have GADT constraints, we perform special "healing" by approximating the type using those constraints. We cannot take the constraints into account because member lookup is cached, and GADT constraints are only valid for specific scopes. -# Useful widgets +## Useful widgets -## Expr +### Expr This is the classical GADT example: @@ -36,7 +41,7 @@ enum Expr[T] { } ``` -## EQ +### EQ The following enum will result in an equality constraint between `S` and `T` if we match on it: @@ -46,7 +51,7 @@ enum EQ[S, T] { } ``` -## SUB +### SUB The following enum will result in a subtyping constraint `S <: T` if we match on it: @@ -56,9 +61,9 @@ enum SUB[-S, +T] { } ``` -# Details of above +## Details of above -## What abstract types can have GADT constraints +### What abstract types can have GADT constraints Right now, we record GADT constraints for: @@ -67,9 +72,9 @@ Right now, we record GADT constraints for: There is a branch on the way which will also record them for type members (so path-dependent types) and singleton types. It has a paper associated: "Implementing path-depepdent GADTs for Scala 3". -## What are necessary relationships? Any examples? +### What are necessary relationships? Any examples? -### Covariance means no constraint is necessary +#### Covariance means no constraint is necessary Standard (non-case) classes allow "strange" inheritance which means that we cannot infer any information from covariant type parameters. @@ -90,7 +95,7 @@ class Weird(list: List[String]) extends IntList with Expr[Nothing] Case classes have a special check which disallows inheritance like `Weird`. This means we can infer extra information from them. 
-## Breaking down the constraints +### Breaking down the constraints ```scala class Expr[A] @@ -113,9 +118,9 @@ def foo[T](e: Expr[List[T]]): T = } ``` -## Relation betweeen GadtConstraint and OrderingConstraint +### Relation betweeen GadtConstraint and OrderingConstraint -### Internal and external types +#### Internal and external types GadtConstraint uses OrderingConstraint as the datastructure to record information about GADT constraints. @@ -127,9 +132,9 @@ To solve this, GadtConstraint internally creates TypeParamRefs which it adds to The TypeParamRefs and TypeVars registered in one constraint cannot ever be present in types mentioned in the other type constraint. The internal TypeParamRefs and TypeVars cannot ever leak out of the GadtConstraint. We cannot ever record a bound in GadtConstraint which mentions TypeParamRefs used for type inference. (That part is ensured by the way TypeComparer is organised – we will always try to record bounds in the "normal" constraint before recording a GADT bound.) -# Other details +## Other details -## TypeComparer approximations +### TypeComparer approximations TypeComparer sometimes approximates the types it compares. Let's see an example based on these definitions: @@ -142,11 +147,11 @@ when comparing if `IntList <: Expr[Int]`, `TypeComparer` will approximate `IntLi The variables which TypeComparer sets are `approxState` and `frozenGadt`. -## Necessary/sufficient either +### Necessary/sufficient either TypeComparer sometimes needs to approximate some constraints, specifically when dealing with intersection and union types. The way this approximation works changes if we're currently inferring GADT constraints. This is hopefully documented well in TypeComparer in doc comments for `necessaryEither` and `sufficientEither`. 
-## Types bound in patterns +### Types bound in patterns ```scala (list : List[Int]) match { @@ -161,7 +166,7 @@ TypeComparer sometimes needs to approximate some constraints, specifically when } ``` -## Internal structure of OrderingConstraint +### Internal structure of OrderingConstraint Imagine we have two type parameters in scope, `A` and `B`. @@ -184,19 +189,19 @@ B <: A The first two constraints are "entries" – they are easy to look up whenever we ask for bounds of `A` or `B`. The third constraint is an ordering – it helps with correctly propagating the bounds we record. -# Possible broad improvements +## Possible broad improvements -## Allow OrderingConstraint to record bounds for things other than TypeParamRefs +### Allow OrderingConstraint to record bounds for things other than TypeParamRefs This would mean we no longer need to keep the bidirectional mapping in GadtConstraint. -## Not mixing OrderingConstraint and ConstraintHandling in GadtConstraint +### Not mixing OrderingConstraint and ConstraintHandling in GadtConstraint GadtConstraint right now mixes OrderingConstraint and ConstraintHandling. The first one is supposed to be the immutable constraint datastructure. The second one implements mutable functionality around a variable containing the immutable datastructure. GadtConstraint mixes them both. Things would be better organised if GadtConstraint was split like the normal constraint. -## Creating a separate TypeComparer for breaking down types into GADT constraints +### Creating a separate TypeComparer for breaking down types into GADT constraints TypeComparer is biased towards one specific way of approximating constraints. When we infer types, it's ok to be "optimistic". When inferring GADT constraints, we should be as pessimistic as possible, in order to only infer constraints which are necessary. 
diff --git a/docs/_docs/internals/overall-structure.md b/docs/_docs/internals/overall-structure.md index f50ab6bf03a7..5bb43eb946a8 100644 --- a/docs/_docs/internals/overall-structure.md +++ b/docs/_docs/internals/overall-structure.md @@ -104,7 +104,6 @@ phases. The current list of phases is specified in class [Compiler] as follows: List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files List(new PostTyper) :: // Additional checks and cleanups after type checking List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) - List(new Staging) :: // Check PCP, heal quoted types and expand macros List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols Nil @@ -112,6 +111,10 @@ phases. The current list of phases is specified in class [Compiler] as follows: /** Phases dealing with TASTY tree pickling and unpickling */ protected def picklerPhases: List[List[Phase]] = List(new Pickler) :: // Generate TASTY info + List(new Inlining) :: // Inline and execute macros + List(new PostInlining) :: // Add mirror support for inlined code + List(new Staging) :: // Check staging levels and heal staged types + List(new Splicing) :: // Replace level 1 splices with holes List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures Nil diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index bae8e6d3ec8d..445e86ee2408 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -140,7 +140,7 @@ type val var while with yield ### Soft keywords ``` -as derives end extension infix inline opaque open throws transparent using | * + - +as derives end erased extension infix inline opaque open throws transparent using | * + - ``` See the [separate section on soft keywords](../reference/soft-modifier.md) for additional @@ -180,13 +180,13 @@ Type 
::= FunType | FunParamClause ‘=>>’ Type TermLambdaTypeTree(ps, t) | MatchType | InfixType -FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) +FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) | FunctionWithMods(ts, t, mods, erasedParams) | HKTypeParamClause '=>' Type PolyFunction(ps, t) FunTypeArgs ::= InfixType | ‘(’ [ FunArgTypes ] ‘)’ | FunParamClause FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ -TypedFunParam ::= id ‘:’ Type +TypedFunParam ::= [`erased`] id ‘:’ Type MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) @@ -207,11 +207,12 @@ Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id Singletons ::= Singleton { ‘,’ Singleton } -FunArgType ::= Type - | ‘=>’ Type PrefixOp(=>, t) +FunArgType ::= [`erased`] Type + | [`erased`] ‘=>’ Type PrefixOp(=>, t) FunArgTypes ::= FunArgType { ‘,’ FunArgType } ParamType ::= [‘=>’] ParamValueType -ParamValueType ::= Type [‘*’] PostfixOp(t, "*") +ParamValueType ::= [‘into’] ExactParamType Into(t) +ExactParamType ::= ParamValueType [‘*’] PostfixOp(t, "*") TypeArgs ::= ‘[’ Types ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) @@ -228,7 +229,7 @@ BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block | HkTypeParamClause ‘=>’ Block | Expr1 FunParams ::= Bindings - | id + | [`erased`] id | ‘_’ Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] If(Parens(cond), thenp, elsep?) | [‘inline’] ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) 
@@ -318,7 +319,10 @@ TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) -Pattern1 ::= Pattern2 [‘:’ RefinedType] Bind(name, Typed(Ident(wildcard), tpe)) +Pattern1 ::= PatVar ‘:’ RefinedType Bind(name, Typed(Ident(wildcard), tpe)) + | [‘-’] integerLiteral ‘:’ RefinedType Typed(pat, tpe) + | [‘-’] floatingPointLiteral ‘:’ RefinedType Typed(pat, tpe) + | Pattern2 Pattern2 ::= [id ‘@’] InfixPattern [‘*’] Bind(name, pat) InfixPattern ::= SimplePattern { id [nl] SimplePattern } InfixOp(pat, op, pat) SimplePattern ::= PatVar Ident(wildcard) @@ -343,9 +347,6 @@ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) -DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds - TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -359,18 +360,29 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param -Param ::= id ‘:’ ParamType [‘=’ Expr] -DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] -DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause -UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’ -DefParams ::= DefParam {‘,’ DefParam} -DefParam ::= {Annotation} [‘inline’] Param ValDef(mods, id, tpe, expr) -- point of mods at id. 
+DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent +DefParamClause ::= DefTypeParamClause + | DefTermParamClause + | UsingParamClause +TypelessClauses ::= TypelessClause {TypelessClause} +TypelessClause ::= DefTermParamClause + | UsingParamClause + +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [`erased`] [‘inline’] Param ValDef(mods, id, tpe, expr) -- point of mods at id. +Param ::= id ‘:’ ParamType [‘=’ Expr] ``` ### Bindings and Imports ```ebnf -Bindings ::= ‘(’ [Binding {‘,’ Binding}] ‘)’ +Bindings ::= ‘(’[`erased`] [Binding {‘,’ [`erased`] Binding}] ‘)’ Binding ::= (id | ‘_’) [‘:’ Type] ValDef(_, id, tpe, EmptyTree) Modifier ::= LocalModifier @@ -415,8 +427,8 @@ Dcl ::= RefineDcl | ‘var’ VarDcl ValDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) VarDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) -DefDcl ::= DefSig ‘:’ Type DefDef(_, name, tparams, vparamss, tpe, EmptyTree) -DefSig ::= id [DefTypeParamClause] DefParamClauses +DefDcl ::= DefSig ‘:’ Type DefDef(_, name, paramss, tpe, EmptyTree) +DefSig ::= id [DefParamClauses] [DefImplicitClause] TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] @@ -427,8 +439,8 @@ Def ::= ‘val’ PatDef | TmplDef PatDef ::= ids [‘:’ Type] ‘=’ Expr | Pattern2 [‘:’ Type] ‘=’ Expr PatDef(_, pats, tpe?, expr) -DefDef ::= DefSig [‘:’ Type] ‘=’ Expr DefDef(_, name, tparams, vparamss, tpe, expr) - | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr DefDef(_, , Nil, vparamss, EmptyTree, expr | Block) +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr DefDef(_, name, paramss, 
tpe, expr) + | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef @@ -440,10 +452,10 @@ ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefParamClause`, `UsingParamClause` must be present +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} - ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods + ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef | Export diff --git a/docs/_docs/reference/changed-features/compiler-plugins.md b/docs/_docs/reference/changed-features/compiler-plugins.md index 20bdb7f49836..82d38bd44d96 100644 --- a/docs/_docs/reference/changed-features/compiler-plugins.md +++ b/docs/_docs/reference/changed-features/compiler-plugins.md @@ -4,18 +4,18 @@ title: "Changes in Compiler Plugins" nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/compiler-plugins.html --- -Compiler plugins are supported by Dotty (and Scala 3) since 0.9. There are two notable changes -compared to `scalac`: +Compiler plugins are supported in Scala 3 since Dotty 0.9. 
There are two notable changes +compared to Scala 2: - No support for analyzer plugins - Added support for research plugins -[Analyzer plugins][1] in `scalac` run during type checking and may influence +[Analyzer plugins][1] run in Scala 2 during type checking and may influence normal type checking. This is a very powerful feature but for production usages, a predictable and consistent type checker is more important. For experimentation and research, Scala 3 introduces _research plugin_. Research plugins -are more powerful than `scalac` analyzer plugins as they let plugin authors customize +are more powerful than Scala 2 analyzer plugins as they let plugin authors customize the whole compiler pipeline. One can easily replace the standard typer by a custom one or create a parser for a domain-specific language. However, research plugins are only enabled for nightly or snaphot releases of Scala 3. @@ -26,7 +26,7 @@ _standard plugins_ in Scala 3. In terms of features, they are similar to ## Using Compiler Plugins -Both standard and research plugins can be used with `scalac` by adding the `-Xplugin:` option: +In Scala 3, both standard and research plugins can be used with `scalac` by adding the `-Xplugin:` option: ```shell scalac -Xplugin:pluginA.jar -Xplugin:pluginB.jar Test.scala @@ -40,7 +40,7 @@ the fully qualified plugin class name. The format of a property file is as follo pluginClass=dividezero.DivideZero ``` -This is different from `scalac` plugins that required a `scalac-plugin.xml` file. +This is different from Scala 2 plugins that require a `scalac-plugin.xml` file. Starting from 1.1.5, `sbt` also supports Scala 3 compiler plugins. Please refer to the [`sbt` documentation][2] for more information. 
diff --git a/docs/_docs/reference/changed-features/eta-expansion-spec.md b/docs/_docs/reference/changed-features/eta-expansion-spec.md index a62d45df9e11..714ab37ae11a 100644 --- a/docs/_docs/reference/changed-features/eta-expansion-spec.md +++ b/docs/_docs/reference/changed-features/eta-expansion-spec.md @@ -51,7 +51,7 @@ implicit val bla: Double = 1.0 val bar = foo // val bar: Int => Float = ... ``` -## Automatic Eta-Expansion and query types +## Automatic Eta-Expansion and context types A method with context parameters can be expanded to a value of a context type by writing the expected context type explicitly. @@ -66,7 +66,7 @@ val bar: Double ?=> Float = foo(3) - If `m` is has an empty argument list (i.e. has type `()R`): 1. If the expected type is of the form `() => T`, we eta expand. 2. If m is defined by Java, or overrides a Java defined method, we insert `()`. - 3. Otherwise we issue an error of the form: + 3. Otherwise we issue an error of the form: `method must be called with () argument` Thus, an unapplied method with an empty argument list is only converted to a function when a function type is expected. It is considered best practice to either explicitly apply the method to `()`, or convert it to a function with `() => m()`. diff --git a/docs/_docs/reference/changed-features/implicit-resolution.md b/docs/_docs/reference/changed-features/implicit-resolution.md index bf15baa3299c..6a898690b565 100644 --- a/docs/_docs/reference/changed-features/implicit-resolution.md +++ b/docs/_docs/reference/changed-features/implicit-resolution.md @@ -67,7 +67,8 @@ Opaque type aliases count as anchors only outside the scope where their alias is 1. If _T_ is a reference to a type parameter, the union of the anchors of both of its bounds. 1. If _T_ is a singleton reference, the anchors of its underlying type, plus, if _T_ is of the form _(P#x).type_, the anchors of _P_. - 1. 
If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object. + 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object, + 1. If _T_ is some other this-type _P.this.type_, the anchors of _P_. 1. If _T_ is some other type, the union of the anchors of each constituent type of _T_. **Definition:** The _implicit scope_ of a type _T_ is the smallest set _S_ of term references such that diff --git a/docs/_docs/reference/changed-features/imports.md b/docs/_docs/reference/changed-features/imports.md index 2058ef08b7db..b322a6a58393 100644 --- a/docs/_docs/reference/changed-features/imports.md +++ b/docs/_docs/reference/changed-features/imports.md @@ -46,7 +46,7 @@ are offered under settings `-source 3.1-migration -rewrite`. ## Syntax -``` +```ebnf Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec | SimpleRef `as` id diff --git a/docs/_docs/reference/changed-features/interpolation-escapes.md b/docs/_docs/reference/changed-features/interpolation-escapes.md index 594e7671c5ab..4abeabdce3ac 100644 --- a/docs/_docs/reference/changed-features/interpolation-escapes.md +++ b/docs/_docs/reference/changed-features/interpolation-escapes.md @@ -4,7 +4,7 @@ title: "Escapes in interpolations" nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/interpolation-escapes.html --- -In Scala 2 there is no straightforward way to represent a single quote character `"` in a single quoted interpolation. A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. +In Scala 2 there is no straightforward way to represent a double-quote character `"` in a quoted interpolation (except in triple-quote interpolation). 
A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. In Scala 3, we can use the `$` meta character of interpolations to escape a `"` character. Example: diff --git a/docs/_docs/reference/changed-features/main-functions.md b/docs/_docs/reference/changed-features/main-functions.md index 4460300d003e..8b035053ad63 100644 --- a/docs/_docs/reference/changed-features/main-functions.md +++ b/docs/_docs/reference/changed-features/main-functions.md @@ -57,7 +57,7 @@ The Scala compiler generates a program from a [`@main`](https://scala-lang.org/a - The generated `main` method calls method `f` with arguments converted using methods in the [`scala.util.CommandLineParser`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$.html) object. -For instance, the `happyBirthDay` method above would generate additional code equivalent to the following class: +For instance, the `happyBirthday` method above would generate additional code equivalent to the following class: ```scala final class happyBirthday: @@ -72,8 +72,8 @@ final class happyBirthday: case error: CLP.ParseError => CLP.showError(error) ``` -**Note**: The `` modifier above expresses that the `main` method is generated -as a static method of class `happyBirthDay`. It is not available for user programs in Scala. Regular "static" members are generated in Scala using objects instead. +**Note:** The `` modifier above expresses that the `main` method is generated +as a static method of class `happyBirthday`. It is not available for user programs in Scala. Regular "static" members are generated in Scala using objects instead. [`@main`](https://scala-lang.org/api/3.x/scala/main.html) methods are the recommended scheme to generate programs that can be invoked from the command line in Scala 3. 
They replace the previous scheme to write program as objects with a special `App` parent class. In Scala 2, `happyBirthday` could be written also like this: diff --git a/docs/_docs/reference/changed-features/match-syntax.md b/docs/_docs/reference/changed-features/match-syntax.md index dba50e9beb6a..3f4d608e261f 100644 --- a/docs/_docs/reference/changed-features/match-syntax.md +++ b/docs/_docs/reference/changed-features/match-syntax.md @@ -47,7 +47,7 @@ The syntactical precedence of match expressions has been changed. The new syntax of match expressions is as follows. -``` +```ebnf InfixExpr ::= ... | InfixExpr MatchClause SimpleExpr ::= ... diff --git a/docs/_docs/reference/changed-features/pattern-bindings.md b/docs/_docs/reference/changed-features/pattern-bindings.md index 2de338fc1dde..a75d64e7cd2d 100644 --- a/docs/_docs/reference/changed-features/pattern-bindings.md +++ b/docs/_docs/reference/changed-features/pattern-bindings.md @@ -50,7 +50,7 @@ for case (x, y) <- elems yield (y, x) // returns List((2, 1), (4, 3)) ## Syntax Changes Generators in for expressions may be prefixed with `case`. -``` +```ebnf Generator ::= [‘case’] Pattern1 ‘<-’ Expr ``` diff --git a/docs/_docs/reference/changed-features/pattern-matching.md b/docs/_docs/reference/changed-features/pattern-matching.md index 30ae5d9dc104..fed76ff9260d 100644 --- a/docs/_docs/reference/changed-features/pattern-matching.md +++ b/docs/_docs/reference/changed-features/pattern-matching.md @@ -13,26 +13,38 @@ Scala 3 supports a superset of Scala 2 [extractors](https://www.scala-lang.org/f Extractors are objects that expose a method `unapply` or `unapplySeq`: ```scala -def unapply[A](x: T)(implicit x: B): U -def unapplySeq[A](x: T)(implicit x: B): U +def unapply(x: T): U +def unapplySeq(x: T): U +``` + +Where `T` is an arbitrary type, if it is a subtype of the scrutinee's type `Scrut`, a [type test](../other-new-features/type-test.md) is performed before calling the method. 
+`U` follows rules described in [Fixed Arity Extractors](#fixed-arity-extractors) and [Variadic Extractors](#variadic-extractors). + +**Note:** `U` can be the type of the extractor object. + +`unapply` and `unapplySeq` can actually have a more general signature, allowing for a leading type clause, as well as arbitrarily many using clauses, both before and after the regular term clause, and at most one implicit clause at the end, for example: + +```scala +def unapply[A, B](using C)(using D)(x: T)(using E)(using F)(implicit y: G): U = ??? ``` Extractors that expose the method `unapply` are called fixed-arity extractors, which work with patterns of fixed arity. Extractors that expose the method `unapplySeq` are called variadic extractors, which enables variadic patterns. -### Fixed-Arity Extractors +## Fixed-Arity Extractors + +Fixed-arity extractors expose the following signature (with potential type, using and implicit clauses): -Fixed-arity extractors expose the following signature: ```scala -def unapply[A](x: T)(implicit x: B): U +def unapply(x: T): U ``` The type `U` conforms to one of the following matches: -- Boolean match -- Product match +- [Boolean match](#boolean-match) +- [Product match](#product-match) Or `U` conforms to the type `R`: @@ -45,53 +57,24 @@ type R = { and `S` conforms to one of the following matches: -- single match -- name-based match +- [single match](#single-match) +- [name-based match](#name-based-match) The former form of `unapply` has higher precedence, and _single match_ has higher precedence over _name-based match_. +**Note:** the `S` in `R` can be `U`. 
+ A usage of a fixed-arity extractor is irrefutable if one of the following condition holds: - `U = true` - the extractor is used as a product match -- `U = Some[T]` (for Scala 2 compatibility) - `U <: R` and `U <: { def isEmpty: false }` +- `U = Some[T]` -### Variadic Extractors - -Variadic extractors expose the following signature: - -```scala -def unapplySeq[A](x: T)(implicit x: B): U -``` - -The type `U` conforms to one of the following matches: - -- sequence match -- product-sequence match - -Or `U` conforms to the type `R`: - -```scala -type R = { - def isEmpty: Boolean - def get: S -} -``` - -and `S` conforms to one of the two matches above. - -The former form of `unapplySeq` has higher priority, and _sequence match_ has higher -precedence over _product-sequence match_. - -A usage of a variadic extractor is irrefutable if one of the following conditions holds: - -- the extractor is used directly as a sequence match or product-sequence match -- `U = Some[T]` (for Scala 2 compatibility) -- `U <: R` and `U <: { def isEmpty: false }` +**Note:** The last rule is necessary because, for compatibility reasons, `isEmpty` on `Some` has return type `Boolean` rather than `false`, even though it always returns `false`. -## Boolean Match +### Boolean Match - `U =:= Boolean` - Pattern-matching on exactly `0` patterns @@ -111,10 +94,10 @@ object Even: // even has an even number of characters ``` -## Product Match +### Product Match - `U <: Product` -- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... `_N: PN` members in `U` +- `N > 0` is the maximum number of consecutive (`val` or parameterless `def`) `_1: P1` ... 
`_N: PN` members in `U` - Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN` For example: @@ -141,9 +124,11 @@ object FirstChars: // First: H; Second: i ``` -## Single Match +### Single Match -- If there is exactly `1` pattern, pattern-matching on `1` pattern with type `U` +- Pattern-matching on `1` pattern with type `S` + +For example, where `Nat <: R`, `S = Int`: @@ -162,27 +147,72 @@ object Nat: // 5 is a natural number ``` -## Name-based Match +### Name-based Match -- `N > 1` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1 ... _N: PN` members in `U` +- `S` has `N > 1` members such that they are each `val`s or parameterless `def`s, and named from `_1` with type `P1` to `_N` with type `PN` +- `S` doesn't have `N+1` members satisfying the previous point, i.e. `N` is maximal - Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN` +For example, where `U = AlwaysEmpty.type <: R`, `S = NameBased`: ```scala -object ProdEmpty: +object MyPatternMatcher: + def unapply(s: String) = AlwaysEmpty + +object AlwaysEmpty: + def isEmpty = true + def get = NameBased + +object NameBased: def _1: Int = ??? def _2: String = ??? - def isEmpty = true - def unapply(s: String): this.type = this - def get = this "" match - case ProdEmpty(_, _) => ??? + case MyPatternMatcher(_, _) => ??? case _ => () ``` -## Sequence Match +## Variadic Extractors + +Variadic extractors expose the following signature (with potential type, using and implicit clauses): + +```scala +def unapplySeq(x: T): U +``` + +Where `U` has to fulfill the following: + +1. Set `V := U` +2. `V` is valid if `V` conforms to one of the following matches: +- [sequence match](#sequence-match) +- [product-sequence match](#product-sequence-match) +3. Otherwise `U` has to conform to the type `R`: +```scala +type R = { + def isEmpty: Boolean + def get: S +} +``` +4. Set `V := S`, and reattempt 2., if it fails `U` is not valid. 
+ +The `V := U` form of `unapplySeq` has higher priority, and _sequence match_ has higher +precedence over _product-sequence match_. + +**Note:** This means `isEmpty` is disregarded if the `V := U` form is valid + +A usage of a variadic extractor is irrefutable if one of the following conditions holds: + +- the extractor is used directly as a sequence match or product-sequence match +- `U <: R` and `U <: { def isEmpty: false }` +- `U = Some[T]` + +**Note:** The last rule is necessary because, for compatibility reasons, `isEmpty` on `Some` has return type `Boolean` rather than `false`, even though it always returns `false`. + +**Note:** Be careful, by the first condition and the note above, it is possible to define an irrefutable extractor with a `def isEmpty: true`. +This is strongly discouraged and, if found in the wild, is almost certainly a bug. + +### Sequence Match -- `U <: X`, `T2` and `T3` conform to `T1` +- `V <: X` ```scala type X = { @@ -192,10 +222,12 @@ type X = { def toSeq: scala.Seq[T3] } ``` - +- `T2` and `T3` conform to `T1` - Pattern-matching on _exactly_ `N` simple patterns with types `T1, T1, ..., T1`, where `N` is the runtime size of the sequence, or - Pattern-matching on `>= N` simple patterns and _a vararg pattern_ (e.g., `xs: _*`) with types `T1, T1, ..., T1, Seq[T1]`, where `N` is the minimum size of the sequence. +For example, where `V = S`, `U = Option[S] <: R`, `S = Seq[Char]` + ```scala @@ -211,14 +243,16 @@ object CharList: // e,x,a,m ``` -## Product-Sequence Match +### Product-Sequence Match -- `U <: Product` -- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... `_N: PN` members in `U` +- `V <: Product` +- `N > 0` is the maximum number of consecutive (`val` or parameterless `def`) `_1: P1` ... `_N: PN` members in `V` - `PN` conforms to the signature `X` defined in Seq Pattern - Pattern-matching on exactly `>= N` patterns, the first `N - 1` patterns have types `P1, P2, ... 
P(N-1)`, the type of the remaining patterns are determined as in Seq Pattern. +For example, where `V = S`, `U = Option[S] <: R`, `S = (String, PN) <: Product`, `PN = Seq[Int]` + ```scala class Foo(val name: String, val children: Int*) object Foo: @@ -227,7 +261,7 @@ object Foo: def foo(f: Foo) = f match case Foo(name, x, y, ns*) => ">= two children." - case Foo(name, ns*) => => "< two children." + case Foo(name, ns*) => "< two children." ``` There are plans for further simplification, in particular to factor out _product match_ diff --git a/docs/_docs/reference/changed-features/structural-types-spec.md b/docs/_docs/reference/changed-features/structural-types-spec.md index d456932649fb..18d0f31ee6fe 100644 --- a/docs/_docs/reference/changed-features/structural-types-spec.md +++ b/docs/_docs/reference/changed-features/structural-types-spec.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structu ## Syntax -``` +```ebnf SimpleType ::= ... | Refinement Refinement ::= ‘{’ RefineStatSeq ‘}’ RefineStatSeq ::= RefineStat {semi RefineStat} diff --git a/docs/_docs/reference/changed-features/structural-types.md b/docs/_docs/reference/changed-features/structural-types.md index 37e583332cf1..d8cd4f867092 100644 --- a/docs/_docs/reference/changed-features/structural-types.md +++ b/docs/_docs/reference/changed-features/structural-types.md @@ -35,19 +35,41 @@ configure how fields and methods should be resolved. Here's an example of a structural type `Person`: ```scala - class Record(elems: (String, Any)*) extends Selectable: - private val fields = elems.toMap - def selectDynamic(name: String): Any = fields(name) +type Person = Record { val name: String; val age: Int } +``` + +The type `Person` adds a _refinement_ to its parent type `Record` that defines the two fields `name` and `age`. We say the refinement is _structural_ since `name` and `age` are not defined in the parent type. But they exist nevertheless as members of type `Person`. 
- type Person = Record { val name: String; val age: Int } - ``` +This allows us to check at compiletime if accesses are valid: -The type `Person` adds a _refinement_ to its parent type `Record` that defines the two fields `name` and `age`. We say the refinement is _structural_ since `name` and `age` are not defined in the parent type. But they exist nevertheless as members of class `Person`. For instance, the following -program would print "Emma is 42 years old.": +```scala +val person: Person = ??? +println(s"${person.name} is ${person.age} years old.") // works +println(person.email) // error: value email is not a member of Person +``` +How is `Record` defined, and how does `person.name` resolve ? + +`Record` is a class that extends the marker trait [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) and defines +a method `selectDynamic`, which maps a field name to its value. +Selecting a member of a structural type is syntactic sugar for a call to this method. +The selections `person.name` and `person.age` are translated by +the Scala compiler to: ```scala - val person = Record("name" -> "Emma", "age" -> 42).asInstanceOf[Person] - println(s"${person.name} is ${person.age} years old.") +person.selectDynamic("name").asInstanceOf[String] +person.selectDynamic("age").asInstanceOf[Int] +``` + +For example, `Record` could be defined as follows: + +```scala +class Record(elems: (String, Any)*) extends Selectable: + private val fields = elems.toMap + def selectDynamic(name: String): Any = fields(name) +``` +Which allows us to create instances of `Person` like so: +```scala +val person = Record("name" -> "Emma", "age" -> 42).asInstanceOf[Person] ``` The parent type `Record` in this example is a generic class that can represent arbitrary records in its `elems` argument. This argument is a @@ -59,52 +81,45 @@ help from the user. 
In practice, the connection between a structural type and its underlying generic representation would most likely be done by a database layer, and therefore would not be a concern of the end user. -`Record` extends the marker trait [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) and defines -a method `selectDynamic`, which maps a field name to its value. -Selecting a structural type member is done by calling this method. -The `person.name` and `person.age` selections are translated by -the Scala compiler to: - -```scala - person.selectDynamic("name").asInstanceOf[String] - person.selectDynamic("age").asInstanceOf[Int] -``` - Besides `selectDynamic`, a `Selectable` class sometimes also defines a method `applyDynamic`. This can then be used to translate function calls of structural members. So, if `a` is an instance of `Selectable`, a structural call like `a.f(b, c)` would translate to ```scala - a.applyDynamic("f")(b, c) +a.applyDynamic("f")(b, c) ``` ## Using Java Reflection -Structural types can also be accessed using [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html). Example: +Using `Selectable` and [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html), we can select a member from unrelated classes. + +> Before resorting to structural calls with Java reflection one should consider alternatives. For instance, sometimes a more modular _and_ efficient architecture can be obtained using [type classes](../contextual/type-classes.md). + +For example, we would like to provide behavior for both [`FileInputStream`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/FileInputStream.html#%3Cinit%3E(java.io.File)) and [`Channel`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/channels/Channel.html) classes by calling their `close` method, however, these classes are unrelated, i.e. 
have no common supertype with a `close` method. Therefore, below we define a structural type `Closeable` that defines a `close` method. ```scala - type Closeable = { def close(): Unit } +type Closeable = { def close(): Unit } - class FileInputStream: - def close(): Unit +class FileInputStream: + def close(): Unit - class Channel: - def close(): Unit +class Channel: + def close(): Unit ``` -Here, we define a structural type `Closeable` that defines a `close` method. There are various classes that have `close` methods, we just list [`FileInputStream`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/FileInputStream.html#%3Cinit%3E(java.io.File)) and [`Channel`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/channels/Channel.html) as two examples. It would be easiest if the two classes shared a common interface that factors out the `close` method. But such factorings are often not possible if different libraries are combined in one application. Yet, we can still have methods that work on -all classes with a `close` method by using the `Closeable` type. For instance, +Ideally we would add a common interface to both these classes to define the `close` method, however they are defined in libraries outside of our control. As a compromise we can use the structural type to define a single implementation for an `autoClose` method: + + ```scala - import scala.reflect.Selectable.reflectiveSelectable +import scala.reflect.Selectable.reflectiveSelectable - def autoClose(f: Closeable)(op: Closeable => Unit): Unit = - try op(f) finally f.close() +def autoClose(f: Closeable)(op: Closeable => Unit): Unit = + try op(f) finally f.close() ``` -The call `f.close()` has to use Java reflection to identify and call the `close` method in the receiver `f`. This needs to be enabled by an import -of `reflectiveSelectable` shown above. 
What happens "under the hood" is then the following: +The call `f.close()` requires `Closeable` to extend `Selectable` to identify and call the `close` method in the receiver `f`. A universal implicit conversion to `Selectable` is enabled by an import +of `reflectiveSelectable` shown above, based on [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html). What happens "under the hood" is then the following: - - The import makes available an implicit conversion that turns any type into a - `Selectable`. `f` is wrapped in this conversion. + - The implicit conversion wraps `f` in an instance of `scala.reflect.Selectable` (which is a subtype of `Selectable`). - The compiler then transforms the `close` call on the wrapped `f` to an `applyDynamic` call. The end result is: @@ -113,7 +128,7 @@ of `reflectiveSelectable` shown above. What happens "under the hood" is then the reflectiveSelectable(f).applyDynamic("close")() ``` - The implementation of `applyDynamic` in `reflectiveSelectable`'s result -uses Java reflection to find and call a method `close` with zero parameters in the value referenced by `f` at runtime. +uses [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html) to find and call a method `close` with zero parameters in the value referenced by `f` at runtime. Structural calls like this tend to be much slower than normal method calls. The mandatory import of `reflectiveSelectable` serves as a signpost that something inefficient is going on. @@ -121,8 +136,6 @@ Structural calls like this tend to be much slower than normal method calls. The `reflectiveSelectable` conversion. However, to warn against inefficient dispatch, Scala 2 requires a language import `import scala.language.reflectiveCalls`. -Before resorting to structural calls with Java reflection one should consider alternatives. 
For instance, sometimes a more modular _and_ efficient architecture can be obtained using type classes. - ## Extensibility New instances of `Selectable` can be defined to support means of @@ -179,13 +192,10 @@ differences. is, as long as the correspondence of the structural type with the underlying value is as stated. -- [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) is just a marker trait, which gives more leeway where and - how to define reflective access operations. By contrast - `Selectable` is a trait which declares the access operations. - - Two access operations, `selectDynamic` and `applyDynamic` are shared between both approaches. In `Selectable`, `applyDynamic` also may also take [`java.lang.Class`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Class.html) arguments indicating the method's formal parameter types. - [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) comes with `updateDynamic`. + +- `updateDynamic` is unique to [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) but as mentioned before, this fact is subject to change, and shouldn't be used as an assumption. [More details](structural-types-spec.md) diff --git a/docs/_docs/reference/changed-features/vararg-splices.md b/docs/_docs/reference/changed-features/vararg-splices.md index 43c4acc5f880..8f23af771216 100644 --- a/docs/_docs/reference/changed-features/vararg-splices.md +++ b/docs/_docs/reference/changed-features/vararg-splices.md @@ -24,7 +24,7 @@ The old syntax for splice arguments will be phased out. 
## Syntax -``` +```ebnf ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ | ‘(’ [Patterns ‘,’] Pattern2 ‘*’ ‘)’ diff --git a/docs/_docs/reference/contextual/by-name-context-parameters.md b/docs/_docs/reference/contextual/by-name-context-parameters.md index 3004bfb2c4c2..3515efd78fa5 100644 --- a/docs/_docs/reference/contextual/by-name-context-parameters.md +++ b/docs/_docs/reference/contextual/by-name-context-parameters.md @@ -53,7 +53,7 @@ In the example above, the definition of `s` would be expanded as follows. ```scala val s = summon[Test.Codec[Option[Int]]]( - optionCodec[Int](using intCodec) + using optionCodec[Int](using intCodec) ) ``` diff --git a/docs/_docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md index 42479d6802b3..11d57c8cbd52 100644 --- a/docs/_docs/reference/contextual/context-bounds.md +++ b/docs/_docs/reference/contextual/context-bounds.md @@ -47,7 +47,7 @@ done automatically under `-rewrite`. ## Syntax -``` +```ebnf TypeParamBounds ::= [SubtypeBounds] {ContextBound} ContextBound ::= ‘:’ Type ``` diff --git a/docs/_docs/reference/contextual/context-functions-spec.md b/docs/_docs/reference/contextual/context-functions-spec.md index 109513e9da86..385ee3901fd8 100644 --- a/docs/_docs/reference/contextual/context-functions-spec.md +++ b/docs/_docs/reference/contextual/context-functions-spec.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-funct ## Syntax -``` +```ebnf Type ::= ... | FunArgTypes ‘?=>’ Type Expr ::= ... 
diff --git a/docs/_docs/reference/contextual/derivation.md b/docs/_docs/reference/contextual/derivation.md index bad47dcb0096..853f7868aa9a 100644 --- a/docs/_docs/reference/contextual/derivation.md +++ b/docs/_docs/reference/contextual/derivation.md @@ -5,9 +5,9 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/derivation.ht --- Type class derivation is a way to automatically generate given instances for type classes which satisfy some simple -conditions. A type class in this sense is any trait or class with a type parameter determining the type being operated -on. Common examples are `Eq`, `Ordering`, or `Show`. For example, given the following `Tree` algebraic data type -(ADT), +conditions. A type class in this sense is any trait or class with a single type parameter determining the type being operated +on, and the special case `CanEqual`. Common examples are `Eq`, `Ordering`, or `Show`. For example, given the following `Tree` algebraic data type +(ADT): ```scala enum Tree[T] derives Eq, Ordering, Show: @@ -16,7 +16,7 @@ enum Tree[T] derives Eq, Ordering, Show: ``` The `derives` clause generates the following given instances for the `Eq`, `Ordering` and `Show` type classes in the -companion object of `Tree`, +companion object of `Tree`: ```scala given [T: Eq] : Eq[Tree[T]] = Eq.derived @@ -26,12 +26,138 @@ given [T: Show] : Show[Tree[T]] = Show.derived We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Show` instances are _derived instances_. -## Types supporting `derives` clauses +**Note:** `derived` can be used manually, this is useful when you do not have control over the definition. For example we can implement `Ordering` for `Option`s like so: + +```scala +given [T: Ordering]: Ordering[Option[T]] = Ordering.derived +``` + +It is discouraged to directly refer to the `derived` member if you can use a `derives` clause instead. All data types can have a `derives` clause. 
This document focuses primarily on data types which also have a given instance of the `Mirror` type class available. -`Mirror` type class instances provide information at the type level about the components and labelling of the type. +## Exact mechanism +In the following, when type arguments are enumerated and the first index evaluates to a larger value than the last, then there are actually no arguments, for example: `A[T_2, ..., T_1]` means `A`. + +For a class/trait/object/enum `DerivingType[T_1, ..., T_N] derives TC`, a derived instance is created in `DerivingType`'s companion object (or `DerivingType` itself if it is an object). + +The general "shape" of the derived instance is as follows: +```scala +given [...](using ...): TC[ ... DerivingType[...] ... ] = TC.derived +``` +`TC.derived` should be an expression that conforms to the expected type on the left, potentially elaborated using term and/or type inference. + +**Note:** `TC.derived` is a normal access, therefore if there are multiple definitions of `TC.derived`, overloading resolution applies. + +What the derived instance precisely looks like depends on the specifics of `DerivingType` and `TC`, we first examine `TC`: + +### `TC` takes 1 parameter `F` + +Therefore `TC` is defined as `TC[F[A_1, ..., A_K]]` (`TC[F]` if `K == 0`) for some `F`. +There are two further cases depending on the kinds of arguments: + +#### `F` and all arguments of `DerivingType` have kind `*` +**Note:** `K == 0` in this case. + +The generated instance is then: +```scala +given [T_1: TC, ..., T_N: TC]: TC[DerivingType[T_1, ..., T_N]] = TC.derived +``` + +This is the most common case, and is the one that was highlighted in the introduction. + +**Note:** The `[T_i: TC, ...]` introduces a `(using TC[T_i], ...)`, more information in [Context Bounds](./context-bounds.md). +This allows the `derived` member to access these evidences. 
+ +**Note:** If `N == 0` the above means: +```scala +given TC[DerivingType] = TC.derived +``` +For example, the class +```scala +case class Point(x: Int, y: Int) derives Ordering +``` +generates the instance +```scala +object Point: + ... + given Ordering[Point] = Ordering.derived +``` + + +#### `F` and `DerivingType` have parameters of matching kind on the right +This section covers cases where you can pair arguments of `F` and `DerivingType` starting from the right such that they have the same kinds pairwise, and all arguments of `F` or `DerivingType` (or both) are used up. +`F` must also have at least one parameter. + +The general shape will then be: +```scala +given [...]: TC[ [...] =>> DerivingType[...] ] = TC.derived +``` +Where of course `TC` and `DerivingType` are applied to types of the correct kind. + +To make this work, we split it into 3 cases: + +If `F` and `DerivingType` take the same number of arguments (`N == K`): +```scala +given TC[DerivingType] = TC.derived +// simplified form of: +given TC[ [A_1, ..., A_K] =>> DerivingType[A_1, ..., A_K] ] = TC.derived +``` +If `DerivingType` takes less arguments than `F` (`N < K`), we use only the rightmost parameters from the type lambda: +```scala +given TC[ [A_1, ..., A_K] =>> DerivingType[A_(K-N+1), ..., A_K] ] = TC.derived + +// if DerivingType takes no arguments (N == 0), the above simplifies to: +given TC[ [A_1, ..., A_K] =>> DerivingType ] = TC.derived +``` + +If `F` takes less arguments than `DerivingType` (`K < N`), we fill in the remaining leftmost slots with type parameters of the given: +```scala +given [T_1, ... T_(N-K)]: TC[[A_1, ..., A_K] =>> DerivingType[T_1, ... T_(N-K), A_1, ..., A_K]] = TC.derived +``` + +### `TC` is the `CanEqual` type class + +We have therefore: `DerivingType[T_1, ..., T_N] derives CanEqual`. + +Let `U_1`, ..., `U_M` be the parameters of `DerivingType` of kind `*`. 
+(These are a subset of the `T_i`s) + +The generated instance is then: +```scala +given [T_1L, T_1R, ..., T_NL, T_NR] // every parameter of DerivingType twice + (using CanEqual[U_1L, U_1R], ..., CanEqual[U_ML, U_MR]): // only parameters of DerivingType with kind * + CanEqual[DerivingType[T_1L, ..., T_NL], DerivingType[T_1R, ..., T_NR]] = // again, every parameter + CanEqual.derived +``` + +The bounds of `T_i`s are handled correctly, for example: `T_2 <: T_1` becomes `T_2L <: T_1L`. + +For example, the class +```scala +class MyClass[A, G[_]](a: A, b: G[B]) derives CanEqual +``` +generates the following given instance: +```scala +object MyClass: + ... + given [A_L, A_R, G_L[_], G_R[_]](using CanEqual[A_L, A_R]): CanEqual[MyClass[A_L, G_L], MyClass[A_R, G_R]] = CanEqual.derived +``` + +### `TC` is not valid for automatic derivation + +Throw an error. + +The exact error depends on which of the above conditions failed. +As an example, if `TC` takes more than 1 parameter and is not `CanEqual`, the error is `DerivingType cannot be unified with the type argument of TC`. + +All data types can have a `derives` clause. The rest of this document focuses primarily on data types which also have a given instance +of the `Mirror` type class available. + +## `Mirror` + +`scala.deriving.Mirror` type class instances provide information at the type level about the components and labelling of the type. They also provide minimal term level infrastructure to allow higher level libraries to provide comprehensive derivation support. @@ -49,7 +175,7 @@ Instances for `Mirror` are also generated conditionally for: - and where the compiler can generate a `Mirror` type class instance for each child case. 
-The `Mirror` type class definition is as follows: +The `scala.deriving.Mirror` type class definition is as follows: ```scala sealed trait Mirror: @@ -158,11 +284,9 @@ Note the following properties of `Mirror` types, + The methods `ordinal` and `fromProduct` are defined in terms of `MirroredMonoType` which is the type of kind-`*` which is obtained from `MirroredType` by wildcarding its type parameters. -## Type classes supporting automatic deriving +## Implementing `derived` with `Mirror` -A trait or class can appear in a `derives` clause if its companion object defines a method named `derived`. The -signature and implementation of a `derived` method for a type class `TC[_]` are arbitrary but it is typically of the -following form, +As seen before, the signature and implementation of a `derived` method for a type class `TC[_]` are arbitrary, but we expect it to typically be of the following form: ```scala import scala.deriving.Mirror @@ -360,23 +484,9 @@ The framework described here enables all three of these approaches without manda For a brief discussion on how to use macros to write a type class `derived` method please read more at [How to write a type class `derived` method using macros](./derivation-macro.md). -## Deriving instances elsewhere - -Sometimes one would like to derive a type class instance for an ADT after the ADT is defined, without being able to -change the code of the ADT itself. To do this, simply define an instance using the `derived` method of the type class -as right-hand side. E.g, to implement `Ordering` for `Option` define, - -```scala -given [T: Ordering]: Ordering[Option[T]] = Ordering.derived -``` - -Assuming the `Ordering.derived` method has a context parameter of type `Mirror[T]` it will be satisfied by the -compiler generated `Mirror` instance for `Option` and the derivation of the instance will be expanded on the right -hand side of this definition in the same way as an instance defined in ADT companion objects. 
- ## Syntax -``` +```ebnf Template ::= InheritClauses [TemplateBody] EnumDef ::= id ClassConstr InheritClauses EnumBody InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] diff --git a/docs/_docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md index d23cadf513d7..6a1504c25048 100644 --- a/docs/_docs/reference/contextual/extension-methods.md +++ b/docs/_docs/reference/contextual/extension-methods.md @@ -285,7 +285,7 @@ def position(s: String)(ch: Char, n: Int): Int = Here are the syntax changes for extension methods and collective extensions relative to the [current syntax](../syntax.md). -``` +```ebnf BlockStat ::= ... | Extension TemplateStat ::= ... | Extension TopStat ::= ... | Extension diff --git a/docs/_docs/reference/contextual/given-imports.md b/docs/_docs/reference/contextual/given-imports.md index 6a55368979b1..28442581e408 100644 --- a/docs/_docs/reference/contextual/given-imports.md +++ b/docs/_docs/reference/contextual/given-imports.md @@ -103,7 +103,7 @@ given instances once their user base has migrated. 
## Syntax -``` +```ebnf Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index 411d50ba63ea..f1333bf8811f 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -10,8 +10,9 @@ that serve for synthesizing arguments to [context parameters](./using-clauses.md ```scala trait Ord[T]: def compare(x: T, y: T): Int - extension (x: T) def < (y: T) = compare(x, y) < 0 - extension (x: T) def > (y: T) = compare(x, y) > 0 + extension (x: T) + def < (y: T) = compare(x, y) < 0 + def > (y: T) = compare(x, y) > 0 given intOrd: Ord[Int] with def compare(x: Int, y: Int) = @@ -51,7 +52,7 @@ given [T](using Ord[T]): Ord[List[T]] with If the name of a given is missing, the compiler will synthesize a name from the implemented type(s). -**Note** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: +**Note:** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: ```scala given_Ord_Int @@ -62,7 +63,7 @@ The precise rules for synthesizing names are found [here](./relationship-implici given instances of types that are "too similar". To avoid conflicts one can use named instances. -**Note** To ensure robust binary compatibility, publicly available libraries should prefer named instances. +**Note:** To ensure robust binary compatibility, publicly available libraries should prefer named instances. ## Alias Givens @@ -173,7 +174,7 @@ is created for each reference. Here is the syntax for given instances: -``` +```ebnf TmplDef ::= ... 
| ‘given’ GivenDef GivenDef ::= [GivenSig] StructuralInstance diff --git a/docs/_docs/reference/contextual/multiversal-equality.md b/docs/_docs/reference/contextual/multiversal-equality.md index e9a81b95f472..b51d03b10963 100644 --- a/docs/_docs/reference/contextual/multiversal-equality.md +++ b/docs/_docs/reference/contextual/multiversal-equality.md @@ -33,6 +33,7 @@ that derives `CanEqual`, e.g. ```scala class T derives CanEqual ``` +> Normally a [derives clause](./derivation.md) accepts only type classes with one parameter, however there is a special case for `CanEqual`. Alternatively, one can also provide a `CanEqual` given instance directly, like this: @@ -82,7 +83,7 @@ def canEqualAny[L, R]: CanEqual[L, R] = CanEqual.derived ``` Even though `canEqualAny` is not declared as `given`, the compiler will still -construct an `canEqualAny` instance as answer to an implicit search for the +construct a `canEqualAny` instance as answer to an implicit search for the type `CanEqual[L, R]`, unless `L` or `R` have `CanEqual` instances defined on them, or the language feature `strictEquality` is enabled. @@ -156,10 +157,10 @@ Instances are defined so that every one of these types has a _reflexive_ `CanEqu - Primitive numeric types can be compared with subtypes of `java.lang.Number` (and _vice versa_). - `Boolean` can be compared with `java.lang.Boolean` (and _vice versa_). - `Char` can be compared with `java.lang.Character` (and _vice versa_). - - Two sequences (of arbitrary subtypes of `scala.collection.Seq`) can be compared + - Two sequences (arbitrary subtypes of `scala.collection.Seq`) can be compared with each other if their element types can be compared. The two sequence types need not be the same. - - Two sets (of arbitrary subtypes of `scala.collection.Set`) can be compared + - Two sets (arbitrary subtypes of `scala.collection.Set`) can be compared with each other if their element types can be compared. The two set types need not be the same. 
- Any subtype of `AnyRef` can be compared with `Null` (and _vice versa_). diff --git a/docs/_docs/reference/contextual/right-associative-extension-methods.md b/docs/_docs/reference/contextual/right-associative-extension-methods.md index 068123df8cd2..61f0beece6ed 100644 --- a/docs/_docs/reference/contextual/right-associative-extension-methods.md +++ b/docs/_docs/reference/contextual/right-associative-extension-methods.md @@ -4,45 +4,57 @@ title: "Right-Associative Extension Methods: Details" nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/right-associative-extension-methods.html --- -The most general form of leading parameters of an extension method is as follows: + +The most general signature an extension method can have is as follows: + - An optional type clause `leftTyParams` - A possibly empty list of using clauses `leadingUsing` - - A single parameter `extensionParam` + - A single parameter `leftParam` (in an explicit term clause) - A possibly empty list of using clauses `trailingUsing` + - A name (preceded by the `def` keyword) + - An optional type clause `rightTyParams` + - An optional single parameter `rightParam` (in an explicit term clause) + - Any number of any clauses `rest` -This is then followed by `def`, the method name, and possibly further parameters -`otherParams`. 
An example is: +For example: ```scala - extension (using a: A, b: B)(using c: C) // <-- leadingUsing - (x: X) // <-- extensionParam + extension [T] // <-- leftTyParams + (using a: A, b: B)(using c: C) // <-- leadingUsing + (x: X) // <-- leftParam (using d: D) // <-- trailingUsing - def +:: (y: Y)(using e: E)(z: Z) // <-- otherParams + def +:: [U] // <-- rightTyParams + (y: Y) // <-- rightParam + (using e: E)(z: Z) // <-- rest ``` + An extension method is treated as a right-associative operator (as in [SLS §6.12.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#infix-operations)) -if it has a name ending in `:` and is immediately followed by a -single parameter. In the example above, that parameter is `(y: Y)`. +if it has a name ending in `:`, and is immediately followed by a +single explicit term parameter (in other words, `rightParam` is present). In the example above, that parameter is `(y: Y)`. The Scala compiler pre-processes a right-associative infix operation such as `x +: xs` to `xs.+:(x)` if `x` is a pure expression or a call-by-name parameter and to `val y = x; xs.+:(y)` otherwise. This is necessary since a regular right-associative infix method is defined in the class of its right operand. To make up for this swap, -the expansion of right-associative extension methods performs an analogous parameter swap. More precisely, if `otherParams` consists of a single parameter -`rightParam` followed by `remaining`, the total parameter sequence +the expansion of right-associative extension methods performs the inverse parameter swap. More precisely, if `rightParam` is present, the total parameter sequence of the extension method's expansion is: ``` - leadingUsing rightParam trailingUsing extensionParam remaining + leftTyParams leadingUsing rightTyParams rightParam leftParam trailingUsing rest ``` +In other words, we swap `leftParam trailingUsing` with `rightTyParams rightParam`. 
+ For instance, the `+::` method above would become ```scala - def +:: (using a: A, b: B)(using c: C) + def +:: [T] + (using a: A, b: B)(using c: C) + [U] (y: Y) - (using d: D) (x: X) + (using d: D) (using e: E)(z: Z) ``` diff --git a/docs/_docs/reference/contextual/type-classes.md b/docs/_docs/reference/contextual/type-classes.md index 9fc0d2eec864..6a15ac3a83d4 100644 --- a/docs/_docs/reference/contextual/type-classes.md +++ b/docs/_docs/reference/contextual/type-classes.md @@ -82,7 +82,7 @@ given Functor[List] with x.map(f) // List already has a `map` method ``` -With this `given` instance in scope, everywhere a `Functor` is expected, the compiler will accept a `List` to be used. +With this `given` instance in scope, everywhere a type with a `Functor` context bound is expected, the compiler will accept a `List` to be used. For instance, we may write such a testing method: @@ -214,7 +214,7 @@ instead of show(compute(i)(config))(config) ``` -Let's define this m then. First, we are going to define a type named `ConfigDependent` representing a function that when passed a `Config` produces a `Result`. +Let's define this `flatMap` then. First, we are going to define a type named `ConfigDependent` representing a function that when passed a `Config` produces a `Result`. ```scala type ConfigDependent[Result] = Config => Result diff --git a/docs/_docs/reference/contextual/using-clauses.md b/docs/_docs/reference/contextual/using-clauses.md index 9187e1916e7d..9177a2f47dc9 100644 --- a/docs/_docs/reference/contextual/using-clauses.md +++ b/docs/_docs/reference/contextual/using-clauses.md @@ -50,29 +50,36 @@ Generally, context parameters may be defined either as a full parameter list `(p ## Class Context Parameters -If a class context parameter is made a member by adding a `val` or `var` modifier, -then that member is available as a given instance. 
+To make a class context parameter visible from outside the class body, it can be made into a member by adding a `val` or `var` modifier. +```scala +class GivenIntBox(using val usingParameter: Int): + def myInt = summon[Int] -Compare the following examples, where the attempt to supply an explicit `given` member induces an ambiguity: +val b = GivenIntBox(using 23) +import b.usingParameter +summon[Int] // 23 +``` +This is preferable to creating an explicit `given` member, as the latter creates ambiguity inside the class body: ```scala -class GivenIntBox(using val givenInt: Int): - def n = summon[Int] - -class GivenIntBox2(using givenInt: Int): - given Int = givenInt - //def n = summon[Int] // ambiguous +class GivenIntBox2(using usingParameter: Int): + given givenMember: Int = usingParameter + def n = summon[Int] // ambiguous given instances: both usingParameter and givenMember match type Int ``` -The `given` member is importable as explained in the section on [importing `given`s](./given-imports.md): +From the outside of `GivenIntBox`, `usingParameter` appears as if it were defined in the class as `given usingParameter: Int`, in particular it must be imported as described in the section on [importing `given`s](./given-imports.md). 
```scala val b = GivenIntBox(using 23) +// Works: import b.given summon[Int] // 23 +usingParameter // 23 +// Fails: import b.* -//givenInt // Not found +summon[Int] // No given instance found +usingParameter // Not found ``` ## Inferring Complex Arguments @@ -93,8 +100,7 @@ With this setup, the following calls are all well-formed, and they all normalize ```scala minimum(xs) maximum(xs)(using descending) -maximum(xs)(using descending(using listOrd)) -maximum(xs)(using descending(using listOrd(using intOrd))) +maximum(xs)(using descending(using intOrd)) ``` ## Multiple `using` Clauses @@ -144,10 +150,10 @@ def summon[T](using x: T): x.type = x Here is the new syntax of parameters and arguments seen as a delta from the [standard context free syntax of Scala 3](../syntax.md). `using` is a soft keyword, recognized only at the start of a parameter or argument list. It can be used as a normal identifier everywhere else. -``` +```ebnf ClsParamClause ::= ... | UsingClsParamClause -DefParamClauses ::= ... | UsingParamClause +DefParamClause ::= ... | UsingParamClause UsingClsParamClause ::= ‘(’ ‘using’ (ClsParams | Types) ‘)’ -UsingParamClause ::= ‘(’ ‘using’ (DefParams | Types) ‘)’ +UsingParamClause ::= ‘(’ ‘using’ (DefTermParams | Types) ‘)’ ParArgumentExprs ::= ... | ‘(’ ‘using’ ExprsInParens ‘)’ ``` diff --git a/docs/_docs/reference/dropped-features/delayed-init.md b/docs/_docs/reference/dropped-features/delayed-init.md index 5d4f614ce951..2694c3374f1c 100644 --- a/docs/_docs/reference/dropped-features/delayed-init.md +++ b/docs/_docs/reference/dropped-features/delayed-init.md @@ -18,7 +18,7 @@ object HelloWorld extends App { ``` However, the code is now run in the initializer of the object, which on -some JVM's means that it will only be interpreted. So, better not use it +some JVMs means that it will only be interpreted. So, better not use it for benchmarking! Also, if you want to access the command line arguments, you need to use an explicit `main` method for that. 
diff --git a/docs/_docs/reference/dropped-features/this-qualifier.md b/docs/_docs/reference/dropped-features/this-qualifier.md index 4fcadff8fae3..f75d19356696 100644 --- a/docs/_docs/reference/dropped-features/this-qualifier.md +++ b/docs/_docs/reference/dropped-features/this-qualifier.md @@ -29,5 +29,3 @@ This can cause problems if a program tries to access the missing private field v // [C] needed if `field` is to be accessed through reflection val retained = field * field ``` - - diff --git a/docs/_docs/reference/enums/adts.md b/docs/_docs/reference/enums/adts.md index 3ab8c9f3b45b..5219e062a633 100644 --- a/docs/_docs/reference/enums/adts.md +++ b/docs/_docs/reference/enums/adts.md @@ -154,7 +154,7 @@ The changes are specified below as deltas with respect to the Scala syntax given 1. Enum definitions are defined as follows: - ``` + ```ebnf TmplDef ::= `enum' EnumDef EnumDef ::= id ClassConstr [`extends' [ConstrApps]] EnumBody EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ @@ -164,7 +164,7 @@ The changes are specified below as deltas with respect to the Scala syntax given 2. Cases of enums are defined as follows: - ``` + ```ebnf EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps]] | ids) ``` diff --git a/docs/_docs/reference/enums/enums-index.md b/docs/_docs/reference/enums/enums-index.md index 80d703c3e897..fb46b3e3ed6b 100644 --- a/docs/_docs/reference/enums/enums-index.md +++ b/docs/_docs/reference/enums/enums-index.md @@ -1,7 +1,7 @@ --- layout: index title: "Enums" -nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/index.html +movedTo: https://docs.scala-lang.org/scala3/reference/enums/index.html --- This chapter documents enums in Scala 3. 
diff --git a/docs/_docs/reference/experimental/canthrow.md b/docs/_docs/reference/experimental/canthrow.md index 025a0ed1c686..064d928fe26c 100644 --- a/docs/_docs/reference/experimental/canthrow.md +++ b/docs/_docs/reference/experimental/canthrow.md @@ -124,7 +124,7 @@ try body catch ... ``` -Note that the right-hand side of the synthesized given is `???` (undefined). This is OK since +Note that the right-hand side of the synthesized given is `compiletime.erasedValue`. This is OK since this given is erased; it will not be executed at runtime. **Note 1:** The [`saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) feature is designed to work only with checked exceptions. An exception type is _checked_ if it is a subtype of diff --git a/docs/_docs/reference/experimental/cc.md b/docs/_docs/reference/experimental/cc.md index 878bc0a64ed6..2a7236453eab 100644 --- a/docs/_docs/reference/experimental/cc.md +++ b/docs/_docs/reference/experimental/cc.md @@ -1,9 +1,13 @@ --- layout: doc-page title: "Capture Checking" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/cc.html --- -Capture checking is a research project that modifies the Scala type system to track references to capabilities in values. It can be enabled with a `-Ycc` compiler option. +Capture checking is a research project that modifies the Scala type system to track references to capabilities in values. It can be enabled by the language import +```scala +import language.experimental.captureChecking +``` At present, capture checking is still highly experimental and unstable. To get an idea what capture checking can do, let's start with a small example: @@ -77,10 +81,6 @@ The following sections explain in detail how capture checking works in Scala 3. The capture checker extension introduces a new kind of types and it enforces some rules for working with these types. 
-Capture checking is enabled by the compiler option `-Ycc`. If the option is not given, the new -type forms can still be written but they are not checked for consistency, because they are -treated simply as certain uninterpreted annotated types. - ## Capabilities and Capturing Types Capture checking is done in terms of _capturing types_ of the form @@ -128,7 +128,8 @@ any capturing type that adds a capture set to `T`. ## Function Types The usual function type `A => B` now stands for a function that can capture arbitrary capabilities. We call such functions -_impure_. By contrast, the new single arrow function type `A -> B` stands for a function that cannot capture any capabilities, or otherwise said, is _pure_. One can add a capture set in front of an otherwise pure function. +_impure_. By contrast, the new single arrow function type `A -> B` stands for a function that cannot capture any capabilities, or otherwise said, is _pure_. +One can add a capture set in front of an otherwise pure function. For instance, `{c, d} A -> B` would be a function that can capture capabilities `c` and `d`, but no others. The impure function type `A => B` is treated as an alias for `{*} A -> B`. That is, impure functions are functions that can capture anything. @@ -176,7 +177,7 @@ def f(x: {c}-> Int): Int ``` Here, the actual argument to `f` is allowed to use the `c` capability but no others. -**Note**: It is strongly recommended to write the capability set and the arrow `->` without intervening spaces, +**Note:** It is strongly recommended to write the capability set and the arrow `->` without intervening spaces, as otherwise the notation would look confusingly like a function type. ## Subtyping and Subcapturing @@ -502,7 +503,7 @@ crasher() This code needs to be rejected since otherwise the call to `crasher()` would cause an unhandled `LimitExceeded` exception to be thrown. 
-Under `-Ycc`, the code is indeed rejected +Under the language import `language.experimental.captureChecking`, the code is indeed rejected ``` 14 | try () => xs.map(f).sum | ^ @@ -654,7 +655,6 @@ TBD The following options are relevant for capture checking. - - **-Ycc** Enables capture checking. - **-Xprint:cc** Prints the program with capturing types as inferred by capture checking. - **-Ycc-debug** Gives more detailed, implementation-oriented information about capture checking, as described in the next section. diff --git a/docs/_docs/reference/experimental/erased-defs-spec.md b/docs/_docs/reference/experimental/erased-defs-spec.md index 5395a8468399..59dfed92da2a 100644 --- a/docs/_docs/reference/experimental/erased-defs-spec.md +++ b/docs/_docs/reference/experimental/erased-defs-spec.md @@ -19,8 +19,8 @@ TODO: complete def g(erased x: Int) = ... - (erased x: Int) => ... - def h(x: (erased Int) => Int) = ... + (erased x: Int, y: Int) => ... + def h(x: (Int, erased Int) => Int) = ... class K(erased x: Int) { ... } erased class E {} @@ -34,12 +34,12 @@ TODO: complete 3. Functions * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R` - * `(given erased x1: T1, x2: T2, ..., xN: TN) => y: (given erased T1, T2, ..., TN) => R` + * `(given x1: T1, erased x2: T2, ..., xN: TN) => y: (given T1, erased T2, ..., TN) => R` * `(given erased T1) => R <:< erased T1 => R` - * `(given erased T1, T2) => R <:< (erased T1, T2) => R` + * `(given T1, erased T2) => R <:< (T1, erased T2) => R` * ... - Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`) + Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`). The `erased` parameters must match exactly in their respective positions. 4. 
Eta expansion @@ -51,7 +51,8 @@ TODO: complete * All `erased` parameters are removed from the function * All argument to `erased` parameters are not passed to the function * All `erased` definitions are removed - * All `(erased T1, T2, ..., TN) => R` and `(given erased T1, T2, ..., TN) => R` become `() => R` + * `(erased ET1, erased ET2, T1, ..., erased ETN, TM) => R` are erased to `(T1, ..., TM) => R`. + * `(given erased ET1, erased ET2, T1, ..., erased ETN, TM) => R` are erased to `(given T1, ..., TM) => R`. 6. Overloading @@ -60,5 +61,10 @@ TODO: complete 7. Overriding - * Member definitions overriding each other must both be `erased` or not be `erased` - * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa + * Member definitions overriding each other must both be `erased` or not be `erased`. + * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa. + +8. Type Restrictions + * For dependent functions, `erased` parameters are limited to realizable types, that is, types that are inhabited by non-null values. + This restriction stops us from using a bad bound introduced by an erased value, which leads to unsoundness (see #4060). + * Polymorphic functions with erased parameters are currently not supported, and will be rejected by the compiler. This is purely an implementation restriction, and might be lifted in the future. diff --git a/docs/_docs/reference/experimental/erased-defs.md b/docs/_docs/reference/experimental/erased-defs.md index 28455f26cdc0..ef4f02e33dd4 100644 --- a/docs/_docs/reference/experimental/erased-defs.md +++ b/docs/_docs/reference/experimental/erased-defs.md @@ -54,13 +54,13 @@ semantics and they are completely erased. ## How to define erased terms? Parameters of methods and functions can be declared as erased, placing `erased` -in front of a parameter list (like `given`). +in front of each erased parameter (like `inline`). 
```scala -def methodWithErasedEv(erased ev: Ev): Int = 42 +def methodWithErasedEv(erased ev: Ev, x: Int): Int = x + 2 -val lambdaWithErasedEv: erased Ev => Int = - (erased ev: Ev) => 42 +val lambdaWithErasedEv: (erased Ev, Int) => Int = + (erased ev, x) => x + 2 ``` `erased` parameters will not be usable for computations, though they can be used @@ -80,7 +80,7 @@ parameters. ```scala erased val erasedEvidence: Ev = ... -methodWithErasedEv(erasedEvidence) +methodWithErasedEv(erasedEvidence, 40) // 42 ``` ## What happens with erased values at runtime? @@ -89,15 +89,15 @@ As `erased` are guaranteed not to be used in computations, they can and will be erased. ```scala -// becomes def methodWithErasedEv(): Int at runtime -def methodWithErasedEv(erased ev: Ev): Int = ... +// becomes def methodWithErasedEv(x: Int): Int at runtime +def methodWithErasedEv(x: Int, erased ev: Ev): Int = ... def evidence1: Ev = ... erased def erasedEvidence2: Ev = ... // does not exist at runtime erased val erasedEvidence3: Ev = ... // does not exist at runtime -// evidence1 is not evaluated and no value is passed to methodWithErasedEv -methodWithErasedEv(evidence1) +// evidence1 is not evaluated and only `x` is passed to methodWithErasedEv +methodWithErasedEv(x, evidence1) ``` ## State machine with erased evidence example diff --git a/docs/_docs/reference/experimental/explicit-nulls.md b/docs/_docs/reference/experimental/explicit-nulls.md index b3fa53429cfe..f8f9ac8e11be 100644 --- a/docs/_docs/reference/experimental/explicit-nulls.md +++ b/docs/_docs/reference/experimental/explicit-nulls.md @@ -540,4 +540,4 @@ Our strategy for binary compatibility with Scala binaries that predate explicit and new libraries compiled without `-Yexplicit-nulls` is to leave the types unchanged and be compatible but unsound. 
-[More details](https://dotty.epfl.ch/docs/internals/explicit-nulls.html) +[Implementation details](https://dotty.epfl.ch/docs/internals/explicit-nulls.html) diff --git a/docs/_docs/reference/experimental/fewer-braces.md b/docs/_docs/reference/experimental/fewer-braces.md index eb454886ad03..5d6f190d4bc1 100644 --- a/docs/_docs/reference/experimental/fewer-braces.md +++ b/docs/_docs/reference/experimental/fewer-braces.md @@ -4,4 +4,4 @@ title: "Fewer Braces" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/fewer-braces.html --- -The documentation contained in this file is now part of [./indentation.html]. \ No newline at end of file +The documentation contained in this file is now part of [./indentation.html]. diff --git a/docs/_docs/reference/experimental/generalized-method-syntax.md b/docs/_docs/reference/experimental/generalized-method-syntax.md new file mode 100644 index 000000000000..072052c1ae10 --- /dev/null +++ b/docs/_docs/reference/experimental/generalized-method-syntax.md @@ -0,0 +1,102 @@ +--- +layout: doc-page +title: "Generalized Method Syntax" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/generalized-method-syntax.html +--- + +This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: + +```scala +import scala.language.experimental.clauseInterleaving +``` + +The inclusion of using clauses is not the only way in which methods have been updated, type parameter clauses are now allowed in any number and at any position. 
+ +## Syntax Changes + +### In Scala 2 + +The old syntax only allowed zero or one type parameter clause, followed by any number of term clauses, optionally followed by an implicit clause: + +```scala +def foo[T, U](x: T)(y: U)(z: Int, s: String)(a: Array[T])(implicit ordInt: Ord[Int], l: List[U]) +``` + +### In Scala 3 + +The new syntax allows any number of type clauses, as long as they are not adjacent: +(do note however that [implicit clauses are discouraged, in favor of using clauses](https://docs.scala-lang.org/scala3/reference/contextual/relationship-implicits.html)) + +```scala +def foo[T, U](x: T)(y: U)[V](z: V, s: String)(using Ord[Int])[A](a: Array[A])(implicit List[U]) +``` + +### Unchanged + +Class definitions and type declarations are unaffected, there can only be up to one type clause, in leading position. + +## Motivation + +The new syntax is a powerful but natural extension of the old one, it allows new design patterns while staying intuitive and legible. + +### Dependent Type Clauses + +As type clauses can come after term clauses, it is now possible to have type parameters that depend on term parameters: + +```scala +trait Key { type Value } +trait DB { + def get(k: Key): Option[k.Value] // dependent result type + def getOrElse(k: Key)[V >: k.Value](default: V): V // dependent type parameter +} +``` + +Note that simply replacing `V` by `k.Value` would not be equivalent. For example, if `k.Value` is `Some[Int]`, only the above allows: +`getOrElse(k)[Option[Int]](None)`, which returns an `Option[Int]`. + +## Details + +### Application + +Method application is unchanged. +When multiple type clauses are expected but not all are passed, the rightmost ones are inferred. + +In particular, the following does not type check, even though the argument `Char` is only valid for `C`: +```scala +def triple[I <: Int](using Ordering[I])[C <: Char](a: I, b: C) = ???
+triple[Char](0, 'c') // error: Char does not conform to upperbound Int +``` + +### Extension Methods + +Extension methods follow the same syntax, for example the following is valid: +```scala +extension [T](l1: List[T]) + def zipWith[U](l2: List[U])[V](l3: List[V]): List[(T,U,V)] +``` + +### When to use + +We recommend to always put a unique type clause at the beginning, unless it is not possible to do so. +For example, the extension method `zipWith` above should be written `zipWith[U, V](l2: List[U], l3: List[V]): List[(T,U,V)]` instead. +On the other hand, the `getOrElse` method is recommended as-is, as it cannot be written with a leading type clause. + +### Formal syntax + +``` +DefDcl ::= DefSig ‘:’ Type +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr +DefSig ::= id [DefParamClauses] [DefImplicitClause] +DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent +DefParamClause ::= DefTypeParamClause + | DefTermParamClause + | UsingParamClause +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] +```
`scala.Conversion` class require a language import +``` +import scala.language.implicitConversions +``` +in any code that uses them as implicit conversions (code that calls conversions explicitly is not affected). If the import is missing, a feature warning is currently issued, and this will become an error in a future version of Scala 3. The motivation for this restriction is that code with hidden implicit conversions is hard to understand and might have correctness or performance problems that go undetected. + +There is one broad use case, however, where implicit conversions are very hard to replace. This is the case where an implicit conversion is used to adapt a method argument to its formal parameter type. An example from the standard library: +```scala +scala> val xs = List(0, 1) +scala> val ys = Array(2, 3) +scala> xs ++ ys +val res0: List[Int] = List(0, 1, 2, 3) +``` +The last input made use of an implicit conversion from `Array[Int]` to `IterableOnce[Int]` which is defined as a Scala 2 style implicit conversion in the standard library. Once the standard library is rewritten with Scala 3 conversions, this will +require a language import at the use site, which is clearly unacceptable. It is possible to avoid the need for implicit conversions using method overloading or type classes, but this often leads to longer and more complicated code, and neither of these alternatives work for vararg parameters. + +This is where the `into` modifier on parameter types comes in. Here is a signature of the `++` method on `List[A]` that uses it: +```scala + def ++ (elems: into IterableOnce[A]): List[A] +``` +The `into` modifier on the type of `elems` means that implicit conversions can be applied to convert the actual argument to an `IterableOnce` value, and this without needing a language import. + +## Function arguments + +`into` also allows conversions on the results of function arguments. 
For instance, consider the new proposed signature of the `flatMap` method on `List[A]`: + +```scala + def flatMap[B](f: into A => IterableOnce[B]): List[B] +``` +This allows a conversion of the actual argument to the function type `A => IterableOnce[B]`. Crucially, it also allows that conversion to be applied to +the function result. So the following would work: +```scala +scala> val xs = List(1, 2, 3) +scala> xs.flatMap(x => x.toString * x) +val res2: List[Char] = List(1, 2, 2, 3, 3, 3) +``` +Here, the conversion from `String` to `Iterable[Char]` is applied on the results of `flatMap`'s function argument when it is applied to the elements of `xs`. + +## Vararg arguments + +When applied to a vararg parameter, `into` allows a conversion on each argument value individually. For example, consider a method `concatAll` that concatenates a variable +number of `IterableOnce[Char]` arguments, and also allows implicit conversions into `IterableOnce[Char]`: + +```scala +def concatAll(xss: into IterableOnce[Char]*): List[Char] = + xss.foldLeft(List[Char]())(_ ++ _) +``` +Here, the call +```scala +concatAll(List('a'), "bc", Array('d', 'e')) +``` +would apply two _different_ implicit conversions: the conversion from `String` to `Iterable[Char]` gets applied to the second argument and the conversion from `Array[Char]` to `Iterable[Char]` gets applied to the third argument. + +## Retrofitting Scala 2 libraries + +A new annotation `allowConversions` has the same effect as an `into` modifier. It is defined as an `@experimental` class in package `scala.annotation`. It is intended to be used for retrofitting Scala 2 library code so that Scala 3 conversions can be applied to arguments without language imports. For instance, the definitions of +`++` and `flatMap` in the Scala 2.13 `List` class could be retrofitted as follows. 
+ +```scala + def ++ (@allowConversions elems: IterableOnce[A]): List[A] + def flatMap[B](@allowConversions f: A => IterableOnce[B]): List[B] +``` +For Scala 3 code, the `into` modifier is preferred. First, because it is shorter, +and second, because it adheres to the principle that annotations should not influence +typing and type inference in Scala. + +## Syntax changes + +The addition to the grammar is: +``` +ParamType ::= [‘=>’] ParamValueType +ParamValueType ::= [‘into‘] ExactParamType +ExactParamType ::= Type [‘*’] +``` +As the grammar shows, `into` can only be applied to the type of a parameter; it is illegal in other positions. diff --git a/docs/_docs/reference/experimental/main-annotation.md b/docs/_docs/reference/experimental/main-annotation.md index 0c60e1050b87..7cc105be06f9 100644 --- a/docs/_docs/reference/experimental/main-annotation.md +++ b/docs/_docs/reference/experimental/main-annotation.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "MainAnnotation" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/main-annotation.html --- `MainAnnotation` provides a generic way to define main annotations such as `@main`. diff --git a/docs/_docs/reference/experimental/named-typeargs-spec.md b/docs/_docs/reference/experimental/named-typeargs-spec.md index 9e1113bbac86..741836a481f2 100644 --- a/docs/_docs/reference/experimental/named-typeargs-spec.md +++ b/docs/_docs/reference/experimental/named-typeargs-spec.md @@ -10,7 +10,7 @@ In this section we give more details about the [named type arguments](named-type The addition to the grammar is: -``` +```ebnf SimpleExpr1 ::= ...
| SimpleExpr (TypeArgs | NamedTypeArgs) NamedTypeArgs ::= ‘[’ NamedTypeArg {‘,’ NamedTypeArg} ‘]’ @@ -19,7 +19,7 @@ NamedTypeArg ::= id ‘=’ Type Note in particular that named arguments cannot be passed to type constructors: -``` scala +```scala class C[T] val x: C[T = Int] = // error diff --git a/docs/_docs/reference/experimental/numeric-literals.md b/docs/_docs/reference/experimental/numeric-literals.md index f493ef459265..8b7aaa23f9e0 100644 --- a/docs/_docs/reference/experimental/numeric-literals.md +++ b/docs/_docs/reference/experimental/numeric-literals.md @@ -4,7 +4,7 @@ title: "Numeric Literals" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/numeric-literals.html --- -**Note**: This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: +This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: ```scala import scala.language.experimental.genericNumberLiterals diff --git a/docs/_docs/reference/experimental/purefuns.md b/docs/_docs/reference/experimental/purefuns.md new file mode 100644 index 000000000000..7c369f85f010 --- /dev/null +++ b/docs/_docs/reference/experimental/purefuns.md @@ -0,0 +1,32 @@ +--- +layout: doc-page +title: "Pure Function Syntax" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/purefuns.html +--- + +Pure functions are an experimental feature that can be enabled by the language import +```scala +import language.experimental.pureFunctions +``` +Under that import the syntax `A -> B` is available with the intention that it should denote a pure, side effect-free function from `A` to `B`. 
Some other variants are also supported: +```scala + (A1, ..., An) -> B // a multi-argument pure function + (x1: A1, ..., xn: An) -> B // a dependent pure function + A ?-> B // a pure context function + (A1, ..., An) ?-> B // a multi-argument pure context function + (x1: A1, ..., xn: An) ?-> B // a dependent pure context function + -> B // a pure call-by-name parameter +``` +A function's purity can be checked by capture tracking, another experimental language feature which is presently in a very early stage. Until that second feature matures, the pure function syntax should be understood to be for documentation only. A pure function type is a requirement that all its instances should be side effect-free. This requirement currently needs to be checked manually, but checking might be automated in the future. + +## Why Enable It Now? + +There are at least three reasons why one might want to enable `pureFunctions` today: + + - to get better documentation since it makes the intent clear, + - to prepare the code base for a time when full effect checking is implemented, + - to have a common code base that can be compiled with or without capture checking enabled. 
+ +## More info: + +TBD \ No newline at end of file diff --git a/docs/_docs/reference/experimental/tupled-function.md b/docs/_docs/reference/experimental/tupled-function.md index da108fc832ad..0cc016953a80 100644 --- a/docs/_docs/reference/experimental/tupled-function.md +++ b/docs/_docs/reference/experimental/tupled-function.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Tupled Function" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/tupled-function.html --- Tupled Function diff --git a/docs/_docs/reference/language-versions/binary-compatibility.md b/docs/_docs/reference/language-versions/binary-compatibility.md index df1c19f97868..d0409d32e6b7 100644 --- a/docs/_docs/reference/language-versions/binary-compatibility.md +++ b/docs/_docs/reference/language-versions/binary-compatibility.md @@ -1,7 +1,7 @@ --- layout: doc-page title: "Binary Compatibility" -nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html +movedTo: https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html --- In Scala 2 different minor versions of the compiler were free to change the way how they encode different language features in JVM bytecode so each bump of the compiler's minor version resulted in breaking binary compatibility and if a project had any Scala dependencies they all needed to be (cross-)compiled to the same minor Scala version that was used in that project itself. On the contrary, Scala 3 has a stable encoding into JVM bytecode. 
diff --git a/docs/_docs/reference/language-versions/language-versions.md b/docs/_docs/reference/language-versions/language-versions.md index 2dfd04857cab..1bc8d939a7e9 100644 --- a/docs/_docs/reference/language-versions/language-versions.md +++ b/docs/_docs/reference/language-versions/language-versions.md @@ -1,7 +1,7 @@ --- layout: index title: "Language Versions" -nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/index.html +movedTo: https://docs.scala-lang.org/scala3/reference/language-versions/index.html --- Additional information on interoperability and migration between Scala 2 and 3 can be found [here](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html). diff --git a/docs/_docs/reference/language-versions/source-compatibility.md b/docs/_docs/reference/language-versions/source-compatibility.md index 4d5b468ac8f2..077f06b2b4db 100644 --- a/docs/_docs/reference/language-versions/source-compatibility.md +++ b/docs/_docs/reference/language-versions/source-compatibility.md @@ -1,7 +1,7 @@ --- layout: doc-page title: "Source Compatibility" -nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/source-compatibility.html +movedTo: https://docs.scala-lang.org/scala3/reference/language-versions/source-compatibility.html --- Scala 3 does NOT guarantee source compatibility between different minor language versions (e.g. some syntax valid in 3.x might get deprecated and then phased out in 3.y for y > x). There are also some syntax structures that were valid in Scala 2 but are not anymore in Scala 3. However the compiler provides a possibility to specify the desired version of syntax used in a particular file or globally for a run of the compiler to make migration between versions easier. @@ -40,4 +40,4 @@ class C { ... } Language imports supersede command-line settings in the source files where they are specified. 
Only one language import specifying a source version is allowed in a source file, and it must come before any definitions in that file. -**Note**: The [Scala 3 Migration Guide](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html) gives further information to help the Scala programmer moving from Scala 2.13 to Scala 3. +**Note:** The [Scala 3 Migration Guide](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html) gives further information to help the Scala programmer moving from Scala 2.13 to Scala 3. diff --git a/docs/_docs/reference/metaprogramming/compiletime-ops.md b/docs/_docs/reference/metaprogramming/compiletime-ops.md index a43c941ae943..038935badc0b 100644 --- a/docs/_docs/reference/metaprogramming/compiletime-ops.md +++ b/docs/_docs/reference/metaprogramming/compiletime-ops.md @@ -11,7 +11,7 @@ The [`scala.compiletime`](https://scala-lang.org/api/3.x/scala/compiletime.html) ### `constValue` and `constValueOpt` `constValue` is a function that produces the constant value represented by a -type. +type, or a compile-time error if the type is not a constant type. ```scala import scala.compiletime.constValue @@ -30,6 +30,8 @@ enabling us to handle situations where a value is not present. Note that `S` is the type of the successor of some singleton type. For example the type `S[1]` is the singleton type `2`. +Since tuples are not constant types, even if their constituents are, there is `constValueTuple`, which given a tuple type `(X1, ..., Xn)`, returns a tuple value `(constValue[X1], ..., constValue[Xn])`. + ### `erasedValue` So far we have seen inline methods that take terms (tuples and integers) as @@ -170,42 +172,9 @@ val concat: "a" + "b" = "ab" val addition: 1 + 1 = 2 ``` -## Summoning Implicits Selectively - -It is foreseen that many areas of typelevel programming can be done with rewrite -methods instead of implicits. But sometimes implicits are unavoidable.
The -problem so far was that the Prolog-like programming style of implicit search -becomes viral: Once some construct depends on implicit search it has to be -written as a logic program itself. Consider for instance the problem of creating -a `TreeSet[T]` or a `HashSet[T]` depending on whether `T` has an `Ordering` or -not. We can create a set of implicit definitions like this: - -```scala -trait SetFor[T, S <: Set[T]] - -class LowPriority: - implicit def hashSetFor[T]: SetFor[T, HashSet[T]] = ... - -object SetsFor extends LowPriority: - implicit def treeSetFor[T: Ordering]: SetFor[T, TreeSet[T]] = ... -``` +## Summoning Givens Selectively -Clearly, this is not pretty. Besides all the usual indirection of implicit -search, we face the problem of rule prioritization where we have to ensure that -`treeSetFor` takes priority over `hashSetFor` if the element type has an -ordering. This is solved (clumsily) by putting `hashSetFor` in a superclass -`LowPriority` of the object `SetsFor` where `treeSetFor` is defined. Maybe the -boilerplate would still be acceptable if the crufty code could be contained. -However, this is not the case. Every user of the abstraction has to be -parameterized itself with a `SetFor` implicit. Considering the simple task _"I -want a `TreeSet[T]` if `T` has an ordering and a `HashSet[T]` otherwise"_, this -seems like a lot of ceremony. - -There are some proposals to improve the situation in specific areas, for -instance by allowing more elaborate schemes to specify priorities. But they all -keep the viral nature of implicit search programs based on logic programming. - -By contrast, the new `summonFrom` construct makes implicit search available +The new `summonFrom` construct makes implicit search available in a functional context. To solve the problem of creating the right set, one would use it as follows: @@ -221,7 +190,7 @@ inline def setFor[T]: Set[T] = summonFrom { A `summonFrom` call takes a pattern matching closure as argument. 
All patterns in the closure are type ascriptions of the form `identifier : Type`. -Patterns are tried in sequence. The first case with a pattern `x: T` such that an implicit value of type `T` can be summoned is chosen. +Patterns are tried in sequence. The first case with a pattern `x: T` such that a contextual value of type `T` can be summoned is chosen. Alternatively, one can also use a pattern-bound given instance, which avoids the explicit using clause. For instance, `setFor` could also be formulated as follows: @@ -236,18 +205,18 @@ inline def setFor[T]: Set[T] = summonFrom { `summonFrom` applications must be reduced at compile time. -Consequently, if we summon an `Ordering[String]` the code above will return a -new instance of `TreeSet[String]`. +Consequently, if a given instance of `Ordering[String]` is in the implicit scope, the code above will return a +new instance of `TreeSet[String]`. Such an instance is defined in `Ordering`'s companion object, so there will always be one. ```scala -summon[Ordering[String]] +summon[Ordering[String]] // Proves that an Ordering[String] is in scope println(setFor[String].getClass) // prints class scala.collection.immutable.TreeSet ``` -**Note** `summonFrom` applications can raise ambiguity errors. Consider the following +**Note:** `summonFrom` applications can raise ambiguity errors. Consider the following code with two givens in scope of type `A`. The pattern match in `f` will raise -an ambiguity error of `f` is applied. +an ambiguity error if `f` is applied. 
```scala class A diff --git a/docs/_docs/reference/metaprogramming/inline.md b/docs/_docs/reference/metaprogramming/inline.md index 8516957c6412..0c4800069bad 100644 --- a/docs/_docs/reference/metaprogramming/inline.md +++ b/docs/_docs/reference/metaprogramming/inline.md @@ -344,10 +344,10 @@ In a transparent inline, an `inline if` will force the inlining of any inline de ## Inline Matches A `match` expression in the body of an `inline` method definition may be -prefixed by the `inline` modifier. If there is enough static information to -unambiguously take a branch, the expression is reduced to that branch and the -type of the result is taken. If not, a compile-time error is raised that -reports that the match cannot be reduced. +prefixed by the `inline` modifier. If there is enough type information +at compile time to select a branch, the expression is reduced to that branch and the +type of the expression is the type of the right-hand side of that result. +If not, a compile-time error is raised that reports that the match cannot be reduced. The example below defines an inline method with a single inline match expression that picks a case based on its static type: @@ -363,8 +363,9 @@ g("test") // Has type (String, String) ``` The scrutinee `x` is examined statically and the inline match is reduced -accordingly returning the corresponding value (with the type specialized because `g` is declared `transparent`). This example performs a simple type test over the -scrutinee. The type can have a richer structure like the simple ADT below. +accordingly returning the corresponding value (with the type specialized because `g` is declared `transparent`). +This example performs a simple type test over the scrutinee. +The type can have a richer structure like the simple ADT below. `toInt` matches the structure of a number in [Church-encoding](https://en.wikipedia.org/wiki/Church_encoding) and _computes_ the corresponding integer. 
diff --git a/docs/_docs/reference/metaprogramming/macros-spec.md b/docs/_docs/reference/metaprogramming/macros-spec.md index aa8f94a9a1f7..35a1b4b3d43a 100644 --- a/docs/_docs/reference/metaprogramming/macros-spec.md +++ b/docs/_docs/reference/metaprogramming/macros-spec.md @@ -4,251 +4,711 @@ title: "Macros Spec" nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros-spec.html --- +## Formalization + +* Multi-stage programming with generative and analytical macros[^2] +* Multi-Stage Macro Calculus, Chapter 4 of Scalable Metaprogramming in Scala 3[^1]. + Contains and extends the calculus of _Multi-stage programming with generative and analytical macros_ with type polymorphism. + +## Syntax + +The quotation syntax using `'` and `$` was chosen to mimic the string interpolation syntax of Scala. +Like a string double-quotation, a single-quote block can contain splices. +However, unlike strings, splices can contain quotes using the same rules. + +```scala +s" Hello $name" s" Hello ${name}" +'{ hello($name) } '{ hello(${name}) } +${ hello('name) } ${ hello('{name}) } +``` + +### Quotes +Quotes come in four flavors: quoted identifiers, quoted blocks, quoted block patterns and quoted type patterns. +Scala 2 used quoted identifiers to represent `Symbol` literals. They were deprecated in Scala 3, allowing the syntax to be used for quotation. +```scala +SimpleExpr ::= ... + | `'` alphaid // quoted identifier + | `'` `{` Block `}` // quoted block +Pattern ::= ... + | `'` `{` Block `}` // quoted block pattern + | `'` `[` Type `]` // quoted type pattern +``` + +Quoted blocks and quoted block patterns contain an expression equivalent to a normal block of code. +When entering either of those we track the fact that we are in a quoted block (`inQuoteBlock`) which is used for spliced identifiers. 
+When entering a quoted block pattern we additionally track the fact that we are in a quoted pattern (`inQuotePattern`) which is used to distinguish spliced blocks and splice patterns. +Lastly, the quoted type pattern simply contains a type. + +### Splices +Splices come in three flavors: spliced identifiers, spliced blocks and splice patterns. +Scala specifies identifiers containing `$` as valid identifiers but reserves them for compiler and standard library use only. +Unfortunately, many libraries have used such identifiers in Scala 2. Therefore to mitigate the cost of migration, we still support them. +We work around this by only allowing spliced identifiers[^3] within quoted blocks or quoted block patterns (`inQuoteBlock`). +Splice blocks and splice patterns can contain an arbitrary block or pattern respectively. +They are distinguished based on their surrounding quote (`inQuotePattern`), a quote block will contain spliced blocks, and a quote block pattern will contain splice patterns. + +```scala +SimpleExpr ::= ... + | `$` alphaid if inQuoteBlock // spliced identifier + | `$` `{` Block `}` if !inQuotePattern // spliced block + | `$` `{` Pattern `}` if inQuotePattern // splice pattern +``` + +### Quoted Pattern Type Variables +Quoted pattern type variables in quoted patterns and quoted type patterns do not require additional syntax. +Any type definition or reference with a name composed of lower cases is assumed to be a pattern type variable definition while typing. +A backticked type name with lower cases is interpreted as a reference to the type with that name. + + ## Implementation -### Syntax - -Compared to the [Scala 3 reference grammar](../syntax.md) -there are the following syntax changes: -``` -SimpleExpr ::= ... - | ‘'’ ‘{’ Block ‘}’ - | ‘'’ ‘[’ Type ‘]’ - | ‘$’ ‘{’ Block ‘}’ -SimpleType ::= ... 
- | ‘$’ ‘{’ Block ‘}’ -``` -In addition, an identifier `$x` starting with a `$` that appears inside -a quoted expression or type is treated as a splice `${x}` and a quoted identifier -`'x` that appears inside a splice is treated as a quote `'{x}` - -### Implementation in `scalac` - -Quotes and splices are primitive forms in the generated abstract syntax trees. -Top-level splices are eliminated during macro expansion while typing. On the -other hand, top-level quotes are eliminated in an expansion phase `PickleQuotes` -phase (after typing and pickling). PCP checking occurs while preparing the RHS -of an inline method for top-level splices and in the `Staging` phase (after -typing and before pickling). - -Macro-expansion works outside-in. If the outermost scope is a splice, -the spliced AST will be evaluated in an interpreter. A call to a -previously compiled method can be implemented as a reflective call to -that method. With the restrictions on splices that are currently in -place that’s all that’s needed. We might allow more interpretation in -splices in the future, which would allow us to loosen the -restriction. Quotes in spliced, interpreted code are kept as they -are, after splices nested in the quotes are expanded. - -If the outermost scope is a quote, we need to generate code that -constructs the quoted tree at run-time. We implement this by -serializing the tree as a TASTy structure, which is stored -in a string literal. At runtime, an unpickler method is called to -deserialize the string into a tree. - -Splices inside quoted code insert the spliced tree as is, after -expanding any quotes in the spliced code recursively. +### Run-Time Representation -## Formalization +The standard library defines the `Quotes` interface which contains all the logic and the abstract classes `Expr` and `Type`. +The compiler implements the `Quotes` interface and provides the implementation of `Expr` and `Type`. 
-The phase consistency principle can be formalized in a calculus that -extends simply-typed lambda calculus with quotes and splices. +##### `class Expr` +Expressions of type `Expr[T]` are represented by the following abstract class: +```scala +abstract class Expr[+T] private[scala] +``` +The only implementation of `Expr` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps a typed AST and a `Scope` object with no methods of its own. +The `Scope` object is used to track the current splice scope and detect scope extrusions. -### Syntax +##### `object Expr` +The companion object of `Expr` contains a few useful static methods; +the `apply`/`unapply` methods to use `ToExpr`/`FromExpr` with ease; +the `betaReduce` and `summon` methods. +It also contains methods to create expressions out of lists or sequences of expressions: `block`, `ofSeq`, `ofList`, `ofTupleFromSeq` and `ofTuple`. -The syntax of terms, values, and types is given as follows: +```scala +object Expr: + def apply[T](x: T)(using ToExpr[T])(using Quotes): Expr[T] = ... + def unapply[T](x: Expr[T])(using FromExpr[T])(using Quotes): Option[T] = ... + def betaReduce[T](e: Expr[T])(using Quotes): Expr[T] = ... + def summon[T: Type](using Quotes): Option[Expr[T]] = ... + def block[T](stats: List[Expr[Any]], e: Expr[T])(using Quotes): Expr[T] = ... + def ofSeq[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[Seq[T]] = ... + def ofList[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[List[T]] = ... + def ofTupleFromSeq(xs: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = ... + def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): + Expr[Tuple.InverseMap[T, Expr]] = ... 
``` -Terms t ::= x variable - (x: T) => t lambda - t t application - 't quote - $t splice -Values v ::= (x: T) => t lambda - 'u quote +##### `class Type` +Types of type `Type[T]` are represented by the following abstract class: +```scala +abstract class Type[T <: AnyKind] private[scala]: + type Underlying = T +``` + +The only implementation of `Type` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps the AST of a type and a `Scope` object with no methods of its own. +The upper bound of `T` is `AnyKind` which implies that `T` may be a higher-kinded type. +The `Underlying` alias is used to select the type from an instance of `Type`. +Users never need to use this alias as they can always use `T` directly. +`Underlying` is used for internal encoding while compiling the code (see _Type Healing_). -Simple terms u ::= x | (x: T) => u | u u | 't +##### `object Type` +The companion object of `Type` contains a few useful static methods. +The first and most important one is the `Type.of` given definition. +This instance of `Type[T]` is summoned by default when no other instance is available. +The `of` operation is an intrinsic operation that the compiler will transform into code that will generate the `Type[T]` at run-time. +Secondly, the `Type.show[T]` operation will show a string representation of the type, which is often useful when debugging. +Finally, the object defines `valueOfConstant` (and `valueOfTuple`) which can transform singleton types (or tuples of singleton types) into their value. -Types T ::= A base type - T -> T function type - expr T quoted + +```scala +object Type: + given of[T <: AnyKind](using Quotes): Type[T] = ... + def show[T <: AnyKind](using Type[T])(using Quotes): String = ... + def valueOfConstant[T](using Type[T])(using Quotes): Option[T] = ... + def valueOfTuple[T <: Tuple](using Type[T])(using Quotes): Option[T] = ... ``` -Typing rules are formulated using a stack of environments -`Es`. 
Individual environments `E` consist as usual of variable
-bindings `x: T`. Environments can be combined using the two
-combinators `'` and `$`.
+
+##### `Quotes`
+The `Quotes` interface is where most of the primitive operations of the quotation system are defined.
+
+Quotes define all the `Expr[T]` methods as extension methods.
+`Type[T]` does not have methods and therefore does not appear here.
+These methods are available as long as `Quotes` is implicitly given in the current scope.
+
+The `Quotes` instance is also the entry point to the [reflection API](./reflection.md) through the `reflect` object.
+
+Finally, `Quotes` provides the internal logic used in quote un-pickling (`QuoteUnpickler`) in quote pattern matching (`QuoteMatching`).
+These interfaces are added to the self-type of the trait to make sure they are implemented on this object but not visible to users of `Quotes`.
+
+Internally, the implementation of `Quotes` will also track its current splicing scope `Scope`.
+This scope will be attached to any expression that is created using this `Quotes` instance.
+
+```scala
+trait Quotes:
+  this: runtime.QuoteUnpickler & runtime.QuoteMatching =>
+
+  extension [T](self: Expr[T])
+    def show: String
+    def matches(that: Expr[Any]): Boolean
+    def value(using FromExpr[T]): Option[T]
+    def valueOrAbort(using FromExpr[T]): T
+  end extension
+
+  extension (self: Expr[Any])
+    def isExprOf[X](using Type[X]): Boolean
+    def asExprOf[X](using Type[X]): Expr[X]
+  end extension
+
+  // abstract object reflect ...
 ```
-Environment E  ::= ()             empty
-                   E, x: T
-Env. stack  Es ::= ()             empty
-                   E              simple
-                   Es * Es        combined
-Separator   *  ::= '
-                   $
+##### `Scope`
+The splice context is represented as a stack (immutable list) of `Scope` objects.
+Each `Scope` contains the position of the splice (used for error reporting) and a reference to the enclosing splice scope `Scope`.
+A scope is a sub-scope of another if the other is contained in its parents.
+This check is performed when an expression is spliced into another using the `Scope` provided in the current scope in `Quotes` and the one in the `Expr` or `Type`. + +### Entry Points +The two entry points for multi-stage programming are macros and the `run` operation. + +#### Macros +Inline macro definitions will inline a top-level splice (a splice not nested in a quote). +This splice needs to be evaluated at compile-time. +In _Avoiding a complete interpreter_[^1], we stated the following restrictions: + + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are either literal constants, quoted expressions (parameters), `Type.of` for type parameters and a reference to `Quotes`. + +These restrictions make the implementation of the interpreter quite simple. +Java Reflection is used to call the single function call in the top-level splice. +The execution of that function is entirely done on compiled bytecode. +These are Scala static methods and may not always become Java static methods, they might be inside module objects. +As modules are encoded as class instances, we need to interpret the prefix of the method to instantiate it before we can invoke the method. + +The code of the arguments has not been compiled and therefore needs to be interpreted by the compiler. +Interpreting literal constants is as simple as extracting the constant from the AST that represents literals. +When interpreting a quoted expression, the contents of the quote is kept as an AST which is wrapped inside the implementation of `Expr`. +Calls to `Type.of[T]` also wrap the AST of the type inside the implementation of `Type`. +Finally, the reference to `Quotes` is supposed to be the reference to the quotes provided by the splice. +This reference is interpreted as a new instance of `Quotes` that contains a fresh initial `Scope` with no parents. 
+ +The result of calling the method via Java Reflection will return an `Expr` containing a new AST that was generated by the implementation of that macro. +The scope of this `Expr` is checked to make sure it did not extrude from some splice or `run` operation. +Then the AST is extracted from the `Expr` and it is inserted as replacement for the AST that contained the top-level splice. + + +#### Run-time Multi-Stage Programming + +To be able to compile the code, the `scala.quoted.staging` library defines the `Compiler` trait. +An instance of `staging.Compiler` is a wrapper over the normal Scala~3 compiler. +To be instantiated it requires an instance of the JVM _classloader_ of the application. + +```scala +import scala.quoted.staging.* +given Compiler = Compiler.make(getClass.getClassLoader) ``` -The two environment combinators are both associative with left and -right identity `()`. -### Operational semantics +The classloader is needed for the compiler to know which dependencies have been loaded and to load the generated code using the same classloader. Below is an example method `mkPower2` that is passed to `staging.run`: + +```scala +def mkPower2()(using Quotes): Expr[Double => Double] = ... -We define a small step reduction relation `-->` with the following rules: +run(mkPower2()) ``` - ((x: T) => t) v --> [x := v]t +To run the previous example, the compiler will create code equivalent to the following class and compile it using a new `Scope` without parents. + +```scala +class RunInstance: + def exec(): Double => Double = ${ mkPower2() } +``` +Finally, `run` will interpret `(new RunInstance).exec()` to evaluate the contents of the quote. +To do this, the resulting `RunInstance` class is loaded in the JVM using Java Reflection, instantiated and then the `exec` method is invoked. + + +### Compilation + +Quotes and splices are primitive forms in the generated typed abstract syntax trees. 
+These need to be type-checked with some extra rules, e.g., staging levels need to be checked and the references to generic types need to be adapted. +Finally, quoted expressions that will be generated at run-time need to be encoded (serialized/pickled) and decoded (deserialized/unpickled). + +#### Typing Quoted Expressions - ${'u} --> u +The typing process for quoted expressions and splices with `Expr` is relatively straightforward. +At its core, quotes are desugared into calls to `quote`, splices are desugared into calls to `splice`. +We track the quotation level when desugaring into these methods. - t1 --> t2 - ----------------- - e[t1] --> e[t2] + +```scala +def quote[T](x: T): Quotes ?=> Expr[T] + +def splice[T](x: Quotes ?=> Expr[T]): T ``` -The first rule is standard call-by-value beta-reduction. The second -rule says that splice and quotes cancel each other out. The third rule -is a context rule; it says that reduction is allowed in the hole `[ ]` -position of an evaluation context. Evaluation contexts `e` and -splice evaluation context `e_s` are defined syntactically as follows: + +It would be impossible to track the quotation levels if users wrote calls to these methods directly. +To know if it is a call to one of those methods we would need to type it first, but to type it we would need to know if it is one of these methods to update the quotation level. +Therefore these methods can only be used by the compiler. + +At run-time, the splice needs to have a reference to the `Quotes` that created its surrounding quote. +To simplify this for later phases, we track the current `Quotes` and encode a reference directly in the splice using `nestedSplice` instead of `splice`. + +```scala +def nestedSplice[T](q: Quotes)(x: q.Nested ?=> Expr[T]): T ``` -Eval context e ::= [ ] | e t | v e | 'e_s[${e}] -Splice context e_s ::= [ ] | (x: T) => e_s | e_s t | u e_s +With this addition, the original `splice` is only used for top-level splices. 
+ +The levels are mostly used to identify top-level splices that need to be evaluated while typing. +We do not use the quotation level to influence the typing process. +Level checking is performed at a later phase. +This ensures that a source expression in a quote will have the same elaboration as a source expression outside the quote. + + + +#### Quote Pattern Matching + +Pattern matching is defined in the trait `QuoteMatching`, which is part of the self type of `Quotes`. +It is implemented by `Quotes` but not available to users of `Quotes`. +To access it, the compiler generates a cast from `Quotes` to `QuoteMatching` and then selects one of its two members: `ExprMatch` or `TypeMatch`. +`ExprMatch` defines an `unapply` extractor method that is used to encode quote patterns and `TypeMatch` defines an `unapply` method for quoted type patterns. + +```scala +trait Quotes: + self: runtime.QuoteMatching & ... => + ... + +trait QuoteMatching: + object ExprMatch: + def unapply[TypeBindings <: Tuple, Tup <: Tuple] + (scrutinee: Expr[Any]) + (using pattern: Expr[Any]): Option[Tup] = ... + object TypeMatch: + ... ``` -### Typing rules +These extractor methods are only meant to be used in code generated by the compiler. +The call to the extractor that is generated has an already elaborated form that cannot be written in source, namely explicit type parameters and explicit contextual parameters. + +This extractor returns a tuple type `Tup` which cannot be inferred from the types in the method signature. +This type will be computed when typing the quote pattern and will be explicitly added to the extractor call. +To refer to type variables in arbitrary places of `Tup`, we need to define them all before their use, hence we have `TypeBindings`, which will contain all pattern type variable definitions. +The extractor also receives a given parameter of type `Expr[Any]` that will contain an expression that represents the pattern. 
+The compiler will explicitly add this pattern expression. +We use a given parameter because these are the only parameters we are allowed to add to the extractor call in a pattern position. + +This extractor is a bit convoluted, but it encodes away all the quotation-specific features. +It compiles the pattern down into a representation that the pattern matcher compiler phase understands. -Typing judgments are of the form `Es |- t: T`. There are two -substructural rules which express the fact that quotes and splices -cancel each other out: +The quote patterns are encoded into two parts: a tuple pattern that is tasked with extracting the result of the match and a quoted expression representing the pattern. +For example, if the pattern has no `$` we will have an `EmptyTuple` as the pattern and `'{1}` to represent the pattern. + +```scala + case '{ 1 } => +// is elaborated to + case ExprMatch(EmptyTuple)(using '{1}) => +// ^^^^^^^^^^ ^^^^^^^^^^ +// pattern expression +``` +When extracting expressions, each pattern that is contained in a splice `${..}` will be placed in order in the tuple pattern. +In the following case, the `f` and `x` are placed in a tuple pattern `(f, x)`. +The type of the tuple is encoded in the `Tup` and not only in the tuple itself. +Otherwise, the extractor would return a tuple `Tuple` for which the types need to be tested which is in turn not possible due to type erasure. + +```scala + case '{ ((y: Int) => $f(y)).apply($x) } => +// is elaborated to + case ExprMatch[.., (Expr[Int => Int], Expr[Int])]((f, x))(using pattern) => +// pattern = '{ ((y: Int) => pat[Int](y)).apply(pat[Int]()) } ``` - Es1 * Es2 |- t: T - --------------------------- - Es1 $ E1 ' E2 * Es2 |- t: T +The contents of the quote are transformed into a valid quote expression by replacing the splice with a marker expression `pat[T](..)`. +The type `T` is taken from the type of the splice and the arguments are the HOAS arguments. 
+This implies that a `pat[T]()` is a closed pattern and `pat[T](y)` is an HOAS pattern that can refer to `y`. - Es1 * Es2 |- t: T - --------------------------- - Es1 ' E1 $ E2 * Es2 |- t: T +Type variables in quoted patterns are first normalized to have all definitions at the start of the pattern. +For each definition of a type variable `t` in the pattern we will add a type variable definition in `TypeBindings`. +Each one will have a corresponding `Type[t]` that will get extracted if the pattern matches. +These `Type[t]` are also listed in the `Tup` and added in the tuple pattern. +It is additionally marked as `using` in the pattern to make it implicitly available in this case branch. + + +```scala + case '{ type t; ($xs: List[t]).map[t](identity[t]) } => +// is elaborated to + case ExprMatch[(t), (Type[t], Expr[List[t]])]((using t, xs))(using p) => +// ^^^ ^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^ ^^^^^^^ +// type bindings result type pattern expression +// p = '{ @patternType type u; pat[List[u]]().map[u](identity[u]) } ``` -The lambda calculus fragment of the rules is standard, except that we -use a stack of environments. The rules only interact with the topmost -environment of the stack. + +The contents of the quote are transformed into a valid quote expression by replacing type variables with fresh ones that do not escape the quote scope. +These are also annotated to be easily identifiable as pattern variables. + +#### Level Consistency Checking +Level consistency checking is performed after typing the program as a static check. +To check level consistency we traverse the tree top-down remembering the context staging level. +Each local definition in scope is recorded with its level and each term reference to a definition is checked against the current staging level. +```scala +// level 0 +'{ // level 1 + val x = ... // level 1 with (x -> 1) + ${ // level 0 (x -> 1) + val y = ... 
// level 0 with (x -> 1, y -> 0) + x // error: defined at level 1 but used in level 0 + } + // level 1 (x -> 1) + x // x is ok +} ``` - x: T in E - -------------- - Es * E |- x: T +#### Type Healing - Es * E, x: T1 |- t: T2 - ------------------------------- - Es * E |- (x: T1) => t: T -> T2 +When using a generic type `T` in a future stage, it is necessary to have a given `Type[T]` in scope. +The compiler needs to identify those references and link them with the instance of `Type[T]`. +For instance consider the following example: +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[T] } +``` - Es |- t1: T2 -> T Es |- t2: T2 - --------------------------------- - Es |- t1 t2: T +For each reference to a generic type `T` that is defined at level 0 and used at level 1 or greater, the compiler will summon a `Type[T]`. +This is usually the given type that is provided as parameter, `t` in this case. +We can use the type `t.Underlying` to replace `T` as it is an alias of that type. +But `t.Underlying` contains the extra information that it is `t` that will be used in the evaluation of the quote. +In a sense, `Underlying` acts like a splice for types. + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[t.Underlying] } ``` -The rules for quotes and splices map between `expr T` and `T` by trading `'` and `$` between -environments and terms. + +Due to some technical limitations, it is not always possible to replace the type reference with the AST containing `t.Underlying`. +To overcome this limitation, we can simply define a list of type aliases at the start of the quote and insert the `t.Underlying` there. +This has the added advantage that we do not have to repeatedly insert the `t.Underlying` in the quote. 
+ +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ type U = t.Underlying; List.empty[U] } +``` +These aliases can be used at any level within the quote and this transformation is only performed on quotes that are at level 0. + +```scala + '{ List.empty[T] ... '{ List.empty[T] } ... } +// becomes + '{ type U = t.Underlying; List.empty[U] ... '{ List.empty[U] } ... } +``` +If we define a generic type at level 1 or greater, it will not be subject to this transformation. +In some future compilation stage, when the definition of the generic type is at level 0, it will be subject to this transformation. +This simplifies the transformation logic and avoids leaking the encoding into code that a macro could inspect. + +```scala +'{ + def emptyList[T: Type](using Quotes): Expr[List[T]] = '{ List.empty[T] } + ... +} +``` +A similar transformation is performed on `Type.of[T]`. +Any generic type in `T` needs to have an implicitly given `Type[T]` in scope, which will also be used as a path. +The example: + +```scala +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[T] match ... +// becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[t.Underlying] match ... +// then becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + t match ... +``` + +The operation `Type.of[t.Underlying]` can be optimized to just `t`. +But this is not always the case. +If the generic reference is nested in the type, we will need to keep the `Type.of`. + +```scala +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[T]] match ... +// becomes +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[t.Underlying]] match ... +``` + +By doing this transformation, we ensure that each abstract type `U` used in `Type.of` has an implicit `Type[U]` in scope. 
+This representation makes it simpler to identify parts of the type that are statically known from those that are known dynamically. +Type aliases are also added within the type of the `Type.of` though these are not valid source code. +These would look like `Type.of[{type U = t.Underlying; Map[U, U]}]` if written in source code. + + +#### Splice Normalization + +The contents of a splice may refer to variables defined in the enclosing quote. +This complicates the process of serialization of the contents of the quotes. +To make serialization simple, we first transform the contents of each level 1 splice. +Consider the following example: + +```scala +def power5to(n: Expr[Int]): Expr[Double] = '{ + val x: Int = 5 + ${ powerCode('{x}, n) } +} +``` + +The variable `x` is defined in the quote and used in the splice. +The normal form will extract all references to `x` and replace them with a staged version of `x`. +We will replace the reference to `x` of type `T` with a `$y` where `y` is of type `Expr[T]`. +Then we wrap the new contents of the splice in a lambda that defines `y` and apply it to the quoted version of `x`. +After this transformation we have 2 parts, a lambda without references to the quote, which knows how to compute the contents of the splice, and a sequence of quoted arguments that refer to variables defined in the lambda. 
+ +```scala +def power5to(n: Expr[Int]): Expr[Double] = '{ + val x: Int = 5 + ${ ((y: Expr[Int]) => powerCode('{$y}, n)).apply('x) } +} +``` + +In general, the splice normal form has the shape `${ .apply(*) }` and the following constraints: + * `` a lambda expression that does not refer to variables defined in the outer quote + * `` sequence of quoted expressions or `Type.of` containing references to variables defined in the enclosing quote and no references to local variables defined outside the enclosing quote + + +##### Function references normalization +A reference to a function `f` that receives parameters is not a valid value in Scala. +Such a function reference `f` can be eta-expanded as `x => f(x)` to be used as a lambda value. +Therefore function references cannot be transformed by the normalization as directly as other expressions as we cannot represent `'{f}` with a method reference type. +We can use the eta-expanded form of `f` in the normalized form. +For example, consider the reference to `f` below. + +```scala +'{ + def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c + ${ '{ f(3)(4, 5) } } +} +``` + +To normalize this code, we can eta-expand the reference to `f` and place it in a quote containing a proper expression. +Therefore the normalized form of the argument `'{f}` becomes the quoted lambda `'{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) }` and is an expression of type `Expr[Int => (Int, Int) => Int]`. +The eta-expansion produces one curried lambda per parameter list. +The application `f(3)(4, 5)` does not become `$g(3)(4, 5)` but `$g.apply(3).apply(4, 5)`. +We add the `apply` because `g` is not a quoted reference to a function but a curried lambda. + +```scala +'{ + def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c + ${ + ( + (g: Expr[Int => (Int, Int) => Int]) => '{$g.apply(3).apply(4, 5)} + ).apply('{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) }) + } +} +``` + +Then we can apply it and beta-reduce the application when generating the code. 
+
+```scala
+  (g: Expr[Int => Int => Int]) => betaReduce('{$g.apply(3).apply(4)})
+```
+
+
+##### Variable assignment normalization
+A reference to a mutable variable in the left-hand side of an assignment cannot be transformed directly as it is not in an expression position.
+```scala
+'{
+  var x: Int = 5
+  ${ g('{x = 2}) }
+}
+```
+
+We can use the same strategy used for function references by eta-expanding the assignment operation `x = _` into `y => x = y`.
+
+```scala
+'{
+  var x: Int = 5
+  ${
+    g(
+      (
+        (f: Expr[Int => Unit]) => betaReduce('{$f(2)})
+      ).apply('{ (y: Int) => x = $y })
+    )
+  }
+}
+```
+
+
+##### Type normalization
+Types defined in the quote are subject to a similar transformation.
+In this example, `T` is defined within the quote at level 1 and used in the splice again at level 1.
+
+```scala
+'{ def f[T] = ${ '{g[T]} } }
+```
+
+The normalization will add a `Type[T]` to the lambda, and we will insert this reference.
+The difference is that it will add an alias similar to the one used in type healing.
+In this example, we create a `type U` that aliases the staged type.
+
+```scala
+'{
+  def f[T] = ${
+    (
+      (t: Type[T]) => '{type U = t.Underlying; g[U]}
+    ).apply(Type.of[T])
+  }
+}
+```
+
+#### Serialization
+
+Quoted code needs to be pickled to make it available at run-time in the next compilation phase.
+We implement this by pickling the AST as a TASTy binary.
+
+##### TASTy
+The TASTy format is the typed abstract syntax tree serialization format of Scala 3.
+It usually pickles the fully elaborated code after type-checking and is kept along the generated Java classfiles.
+
+
+##### Pickling
+We use TASTy as a serialization format for the contents of the quotes.
+To show how serialization is performed, we will use the following example.
+```scala +'{ + val (x, n): (Double, Int) = (5, 2) + ${ powerCode('{x}, '{n}) } * ${ powerCode('{2}, '{n}) } +} ``` - Es $ () |- t: expr T - -------------------- - Es |- $t: T +This quote is transformed into the following code when normalizing the splices. - Es ' () |- t: T - ---------------- - Es |- 't: expr T +```scala +'{ + val (x, n): (Double, Int) = (5, 2) + ${ + ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) + } * ${ + ((m: Expr[Int]) => powerCode('{2}, m)).apply('n) + } +} ``` -The meta theory of a slightly simplified 2-stage variant of this calculus -is studied [separately](./simple-smp.md). -## Going Further +Splice normalization is a key part of the serialization process as it only allows references to variables defined in the quote in the arguments of the lambda in the splice. +This makes it possible to create a closed representation of the quote without much effort. +The first step is to remove all the splices and replace them with holes. +A hole is like a splice but it lacks the knowledge of how to compute the contents of the splice. +Instead, it knows the index of the hole and the contents of the arguments of the splice. +We can see this transformation in the following example where a hole is represented by `<< idx; holeType; args* >>`. + +```scala + ${ ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) } +// becomes + << 0; Double; x, n >> +``` -The metaprogramming framework as presented and currently implemented is quite restrictive -in that it does not allow for the inspection of quoted expressions and -types. It’s possible to work around this by providing all necessary -information as normal, unquoted inline parameters. But we would gain -more flexibility by allowing for the inspection of quoted code with -pattern matching. This opens new possibilities. +As this was the first hole it has index 0. +The hole type is `Double`, which needs to be remembered now that we cannot infer it from the contents of the splice. 
+The arguments of the splice are `x` and `n`; note that they do not require quoting because they were moved out of the splice. -For instance, here is a version of `power` that generates the multiplications -directly if the exponent is statically known and falls back to the dynamic -implementation of `power` otherwise. +References to healed types are handled in a similar way. +Consider the `emptyList` example, which shows the type aliases that are inserted into the quote. ```scala -import scala.quoted.* +'{ List.empty[T] } +// type healed to +'{ type U = t.Underlying; List.empty[U] } +``` +Instead of replacing a splice, we replace the `t.Underlying` type with a type hole. +The type hole is represented by `<< idx; bounds >>`. +```scala +'{ type U = << 0; Nothing..Any >>; List.empty[U] } +``` +Here, the bounds of `Nothing..Any` are the bounds of the original `T` type. +The types of a `Type.of` are transformed in the same way. -inline def power(x: Double, n: Int): Double = - ${ powerExpr('x, 'n) } -private def powerExpr(x: Expr[Double], n: Expr[Int]) - (using Quotes): Expr[Double] = - n.value match - case Some(m) => powerExpr(x, m) - case _ => '{ dynamicPower($x, $n) } +With these transformations, the contents of the quote or `Type.of` are guaranteed to be closed and therefore can be pickled. +The AST is pickled into TASTy, which is a sequence of bytes. +This sequence of bytes needs to be instantiated in the bytecode, but unfortunately it cannot be dumped into the classfile as bytes. +To reify it we encode the bytes into a Java `String`. +In the following examples we display this encoding in human readable form with the fictitious `|tasty"..."|` string literal. 
-private def powerExpr(x: Expr[Double], n: Int) - (using Quotes): Expr[Double] = - if n == 0 then '{ 1.0 } - else if n == 1 then x - else if n % 2 == 0 then '{ val y = $x * $x; ${ powerExpr('y, n / 2) } } - else '{ $x * ${ powerExpr(x, n - 1) } } +```scala +// pickled AST bytes encoded in a base64 string +tasty""" + val (x, n): (Double, Int) = (5, 2) + << 0; Double; x, n >> * << 1; Double; n >> +""" +// or +tasty""" + type U = << 0; Nothing..Any; >> + List.empty[U] +""" +``` +The contents of a quote or `Type.of` are not always pickled. +In some cases it is better to generate equivalent (smaller and/or faster) code that will compute the expression. +Literal values are compiled into a call to `Expr()` using the implementation of `ToExpr` to create the quoted expression. +This is currently performed only on literal values, but can be extended to any value for which we have a `ToExpr` defined in the standard library. +Similarly, for non-generic types we can use their respective `java.lang.Class` and convert them into a `Type` using a primitive operation `typeConstructorOf` defined in the reflection API. -private def dynamicPower(x: Double, n: Int): Double = - if n == 0 then 1.0 - else if n % 2 == 0 then dynamicPower(x * x, n / 2) - else x * dynamicPower(x, n - 1) +##### Unpickling + +Now that we have seen how a quote is pickled, we can look at how to unpickle it. +We will continue with the previous example. + +Holes were used to replace the splices in the quote. +When we perform this transformation we also need to remember the lambdas from the splices and their hole index. +When unpickling a hole, the corresponding splice lambda will be used to compute the contents of the hole. +The lambda will receive as parameters quoted versions of the arguments of the hole. 
+For example to compute the contents of `<< 0; Double; x, n >>` we will evaluate the following code
+
+```scala
+  ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n)
```
-In the above, the method `.value` maps a constant expression of the type
-`Expr[T]` to its value of the type `T`.
+The evaluation is not as trivial as it looks, because the lambda comes from compiled code and the rest is code that must be interpreted.
+We put the AST of `x` and `n` into `Expr` objects to simulate the quotes and then we use Java Reflection to call the `apply` method.
+
+We may have many holes in a quote and therefore as many lambdas.
+To avoid the instantiation of many lambdas, we can join them together into a single lambda.
+Apart from the list of arguments, this lambda will also take the index of the hole that is being evaluated.
+It will perform a switch match on the index and call the corresponding lambda in each branch.
+Each branch will also extract the arguments depending on the definition of the lambda.
+The applications of the original lambdas are beta-reduced to avoid extra overhead.
-With the right extractors, the "AsFunction" conversion
-that maps expressions over functions to functions over expressions can
-be implemented in user code:
```scala
-given AsFunction1[T, U]: Conversion[Expr[T => U], Expr[T] => Expr[U]] with
-  def apply(f: Expr[T => U]): Expr[T] => Expr[U] =
-    (x: Expr[T]) => f match
-      case Lambda(g) => g(x)
-      case _ => '{ ($f)($x) }
+(idx: Int, args: Seq[Any]) =>
+  idx match
+    case 0 => // for << 0; Double; x, n >>
+      val x = args(0).asInstanceOf[Expr[Double]]
+      val n = args(1).asInstanceOf[Expr[Int]]
+      powerCode(x, n)
+    case 1 => // for << 1; Double; n >>
+      val n = args(0).asInstanceOf[Expr[Int]]
+      powerCode('{2}, n)
```
-This assumes an extractor
+
+This is similar to what we do for splices: when we replace the type aliases with holes, we keep track of the index of each hole.
+Instead of lambdas, we will have a list of references to instances of `Type`.
+From the following example we would extract `t`, `u`, ... .
+
```scala
-object Lambda:
-  def unapply[T, U](x: Expr[T => U]): Option[Expr[T] => Expr[U]]
+ '{ type T1 = t1.Underlying; type Tn = tn.Underlying; ... }
+// with holes
+ '{ type T1 = << 0; ... >>; type Tn = << n-1; ... >>; ... }
```
-Once we allow inspection of code via extractors, it’s tempting to also
-add constructors that create typed trees directly without going
-through quotes. Most likely, those constructors would work over `Expr`
-types which lack a known type argument. For instance, an `Apply`
-constructor could be typed as follows:
+
+As the type holes are at the start of the quote, they will have the first `N` indices.
+This implies that we can place the references in a sequence `Seq(t, u, ...)` where the index in the sequence is the same as the hole index.
+
+Lastly, the quote itself is replaced by a call to `QuoteUnpickler.unpickleExpr` which will unpickle the AST, evaluate the holes, i.e., splices, and wrap the resulting AST in an `Expr[Int]`.
+This method takes the pickled `|tasty"..."|`, the types and the hole lambda.
+Similarly, `Type.of` is replaced with a call to `QuoteUnpickler.unpickleType` but only receives the pickled `|tasty"..."|` and the types.
+Because `QuoteUnpickler` is part of the self-type of the `Quotes` class, we have to cast the instance but know that this cast will always succeed.
+
```scala
-def Apply(fn: Expr[Any], args: List[Expr[Any]]): Expr[Any]
+quotes.asInstanceOf[runtime.QuoteUnpickler].unpickleExpr[T](
+  pickled = tasty"...",
+  types = Seq(...),
+  holes = (idx: Int, args: Seq[Any]) => idx match ...
+)
```
-This would allow constructing applications from lists of arguments
-without having to match the arguments one-by-one with the
-corresponding formal parameter types of the function.
We then need "at -the end" a method to convert an `Expr[Any]` to an `Expr[T]` where `T` is -given from the outside. For instance, if `code` yields a `Expr[Any]`, then -`code.atType[T]` yields an `Expr[T]`. The `atType` method has to be -implemented as a primitive; it would check that the computed type -structure of `Expr` is a subtype of the type structure representing -`T`. -Before going down that route, we should evaluate in detail the tradeoffs it -presents. Constructing trees that are only verified _a posteriori_ -to be type correct loses a lot of guidance for constructing the right -trees. So we should wait with this addition until we have more -use-cases that help us decide whether the loss in type-safety is worth -the gain in flexibility. In this context, it seems that deconstructing types is -less error-prone than deconstructing terms, so one might also -envisage a solution that allows the former but not the latter. - -## Conclusion - -Metaprogramming has a reputation of being difficult and confusing. -But with explicit `Expr/Type` types and quotes and splices it can become -downright pleasant. A simple strategy first defines the underlying quoted or unquoted -values using `Expr` and `Type` and then inserts quotes and splices to make the types -line up. Phase consistency is at the same time a great guideline -where to insert a splice or a quote and a vital sanity check that -the result makes sense. +[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370) +[^2]: [Multi-stage programming with generative and analytical macros](https://dl.acm.org/doi/10.1145/3486609.3487203). +[^3]: In quotes, identifiers starting with `$` must be surrounded by backticks (`` `$` ``). For example `$conforms` from `scala.Predef`. 
diff --git a/docs/_docs/reference/metaprogramming/macros.md b/docs/_docs/reference/metaprogramming/macros.md index 8045794d1143..a91e69d985f0 100644 --- a/docs/_docs/reference/metaprogramming/macros.md +++ b/docs/_docs/reference/metaprogramming/macros.md @@ -6,818 +6,617 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros.h > When developing macros enable `-Xcheck-macros` scalac option flag to have extra runtime checks. -## Macros: Quotes and Splices +## Multi-Staging -Macros are built on two well-known fundamental operations: quotation and splicing. -Quotation is expressed as `'{...}` for expressions and splicing is expressed as `${ ... }`. -Additionally, within a quote or a splice we can quote or splice identifiers directly (i.e. `'e` and `$e`). -Readers may notice the resemblance of the two aforementioned syntactic -schemes with the familiar string interpolation syntax. +#### Quoted expressions +Multi-stage programming in Scala 3 uses quotes `'{..}` to delay, i.e., stage, execution of code and splices `${..}` to evaluate and insert code into quotes. +Quoted expressions are typed as `Expr[T]` with a covariant type parameter `T`. +It is easy to write statically safe code generators with these two concepts. +The following example shows a naive implementation of the $x^n$ mathematical operation. ```scala -println(s"Hello, $name, here is the result of 1 + 1 = ${1 + 1}") +import scala.quoted.* +def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + if n == 0 then '{ 1.0 } + else if n == 1 then x + else '{ $x * ${ unrolledPowerCode(x, n-1) } } ``` -In string interpolation we _quoted_ a string and then we _spliced_ into it, two others. The first, `name`, is a reference to a value of type [`String`](https://scala-lang.org/api/3.x/scala/Predef$.html#String-0), and the second is an arithmetic expression that will be _evaluated_ followed by the splicing of its string representation. 
- -Quotes and splices in this section allow us to treat code in a similar way, -effectively supporting macros. The entry point for macros is an inline method -with a top-level splice. We call it a top-level because it is the only occasion -where we encounter a splice outside a quote (consider as a quote the -compilation-unit at the call-site). For example, the code below presents an -`inline` method `assert` which calls at compile-time a method `assertImpl` with -a boolean expression tree as argument. `assertImpl` evaluates the expression and -prints it again in an error message if it evaluates to `false`. - ```scala -import scala.quoted.* - -inline def assert(inline expr: Boolean): Unit = - ${ assertImpl('expr) } - -def assertImpl(expr: Expr[Boolean])(using Quotes) = '{ - if !$expr then - throw AssertionError(s"failed assertion: ${${ showExpr(expr) }}") +'{ + val x = ... + ${ unrolledPowerCode('{x}, 3) } // evaluates to: x * x * x } - -def showExpr(expr: Expr[Boolean])(using Quotes): Expr[String] = - '{ [actual implementation later in this document] } ``` -If `e` is an expression, then `'{e}` represents the typed -abstract syntax tree representing `e`. If `T` is a type, then `Type.of[T]` -represents the type structure representing `T`. The precise -definitions of "typed abstract syntax tree" or "type-structure" do not -matter for now, the terms are used only to give some -intuition. Conversely, `${e}` evaluates the expression `e`, which must -yield a typed abstract syntax tree or type structure, and embeds the -result as an expression (respectively, type) in the enclosing program. +Quotes and splices are duals of each other. +For an arbitrary expression `x` of type `T` we have `${'{x}} = x` and for an arbitrary expression `e` of type `Expr[T]` we have `'{${e}} = e`. -Quotations can have spliced parts in them; in this case the embedded -splices are evaluated and embedded as part of the formation of the -quotation. 
+#### Abstract types
+Quotes can handle generic and abstract types using the type class `Type[T]`.
+A quote that refers to a generic or abstract type `T` requires a given `Type[T]` to be provided in the implicit scope.
+The following examples show how `T` is annotated with a context bound (`: Type`) to provide an implicit `Type[T]`, or the equivalent `using Type[T]` parameter.
-Quotes and splices can also be applied directly to identifiers. An identifier
-`$x` starting with a `$` that appears inside a quoted expression or type is treated as a
-splice `${x}`. Analogously, an quoted identifier `'x` that appears inside a splice
-is treated as a quote `'{x}`. See the Syntax section below for details.
+```scala
+import scala.quoted.*
+def singletonListExpr[T: Type](x: Expr[T])(using Quotes): Expr[List[T]] =
+  '{ List[T]($x) } // generic T used within a quote
-Quotes and splices are duals of each other.
-For arbitrary expressions `e` we have:
+def emptyListExpr[T](using Type[T], Quotes): Expr[List[T]] =
+  '{ List.empty[T] } // generic T used within a quote
+```
+If no other instance is found, the default `Type.of[T]` is used.
+The following example implicitly uses `Type.of[String]` and `Type.of[Option[U]]`.
```scala
-${'{e}} = e
-'{${e}} = e
+val list1: Expr[List[String]] =
+  singletonListExpr('{"hello"}) // requires a given `Type[String]`
+val list0: Expr[List[Option[U]]] =
+  emptyListExpr[Option[U]] // requires a given `Type[Option[U]]`
```
-## Types for Quotations
-The type signatures of quotes and splices can be described using
-two fundamental types:
-
-- `Expr[T]`: abstract syntax trees representing expressions of type `T`
-- `Type[T]`: non erased representation of type `T`.
-
-Quoting takes expressions of type `T` to expressions of type `Expr[T]`
-and it takes types `T` to expressions of type `Type[T]`. Splicing
-takes expressions of type `Expr[T]` to expressions of type `T` and it
-takes expressions of type `Type[T]` to types `T`.
-
-The two types can be defined in package [`scala.quoted`](https://scala-lang.org/api/3.x/scala/quoted.html) as follows:
+The `Type.of[T]` method is a primitive operation that the compiler will handle specially.
+It will provide the implicit if the type `T` is statically known, or if `T` contains some other types `Ui` for which we have an implicit `Type[Ui]`.
+In the example, `Type.of[String]` has a statically known type and `Type.of[Option[U]]` requires an implicit `Type[U]` in scope.
+#### Quote context
+We also track the current quotation context using a given `Quotes` instance.
+To create a quote `'{..}` we require a given `Quotes` context, which should be passed as a contextual parameter `(using Quotes)` to the function.
+Each splice will provide a new `Quotes` context within the scope of the splice.
+Therefore quotes and splices can be seen as methods with the following signatures, but with special semantics.
```scala
-package scala.quoted
+def '[T](x: T): Quotes ?=> Expr[T] // def '[T](x: T)(using Quotes): Expr[T]
-sealed trait Expr[+T]
-sealed trait Type[T]
+def $[T](x: Quotes ?=> Expr[T]): T
```
-Both `Expr` and `Type` are abstract and sealed, so all constructors for
-these types are provided by the system. One way to construct values of
-these types is by quoting, the other is by type-specific lifting
-operations that will be discussed later on.
-
-## The Phase Consistency Principle
+The lambda with a question mark `?=>` is a contextual function; it is a lambda that takes its argument implicitly and provides it implicitly in the implementation of the lambda.
+`Quotes` are used for a variety of purposes that will be mentioned when covering those topics.
-A fundamental *phase consistency principle* (PCP) regulates accesses
-to free variables in quoted and spliced code:
+## Quoted Values
-- _For any free variable reference `x`, the number of quoted scopes and the number of spliced scopes between the reference to `x` and the definition of `x` must be equal_.
+#### Lifting +While it is not possible to use cross-stage persistence of local variables, it is possible to lift them to the next stage. +To this end, we provide the `Expr.apply` method, which can take a value and lift it into a quoted representation of the value. -Here, `this`-references count as free variables. On the other -hand, we assume that all imports are fully expanded and that `_root_` is -not a free variable. So references to global definitions are -allowed everywhere. +```scala +val expr1plus1: Expr[Int] = '{ 1 + 1 } -The phase consistency principle can be motivated as follows: First, -suppose the result of a program `P` is some quoted text `'{ ... x -... }` that refers to a free variable `x` in `P`. This can be -represented only by referring to the original variable `x`. Hence, the -result of the program will need to persist the program state itself as -one of its parts. We don’t want to do this, hence this situation -should be made illegal. Dually, suppose a top-level part of a program -is a spliced text `${ ... x ... }` that refers to a free variable `x` -in `P`. This would mean that we refer during _construction_ of `P` to -a value that is available only during _execution_ of `P`. This is of -course impossible and therefore needs to be ruled out. Now, the -small-step evaluation of a program will reduce quotes and splices in -equal measure using the cancellation rules above. But it will neither -create nor remove quotes or splices individually. So the PCP ensures -that program elaboration will lead to neither of the two unwanted -situations described above. +val expr2: Expr[Int] = Expr(1 + 1) // lift 2 into '{ 2 } +``` -In what concerns the range of features it covers, this form of macros introduces -a principled metaprogramming framework that is quite close to the MetaML family of -languages. 
One difference is that MetaML does not have an equivalent of the PCP -
-quoted code in MetaML _can_ access variables in its immediately enclosing
-environment, with some restrictions and caveats since such accesses involve
-serialization. However, this does not constitute a fundamental gain in
-expressiveness.
+While it looks type wise similar to `'{ 1 + 1 }`, the semantics of `Expr(1 + 1)` are quite different.
+`Expr(1 + 1)` will not stage or delay any computation; the argument is evaluated to a value and then lifted into a quote.
+The quote will contain code that will create a copy of this value in the next stage.
+`Expr` is polymorphic and user-extensible via the `ToExpr` type class.
-## From `Expr`s to Functions and Back
+```scala
+trait ToExpr[T]:
+  def apply(x: T)(using Quotes): Expr[T]
+```
-It is possible to convert any `Expr[T => R]` into `Expr[T] => Expr[R]` and back.
-These conversions can be implemented as follows:
+We can implement a `ToExpr` using a `given` definition that will add the definition to the implicits in scope.
+In the following example we show how to implement a `ToExpr[Option[T]]` for any liftable type `T`.
```scala
-def to[T: Type, R: Type](f: Expr[T] => Expr[R])(using Quotes): Expr[T => R] =
-  '{ (x: T) => ${ f('x) } }
-
-def from[T: Type, R: Type](f: Expr[T => R])(using Quotes): Expr[T] => Expr[R] =
-  (x: Expr[T]) => '{ $f($x) }
+given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with
+  def apply(opt: Option[T])(using Quotes): Expr[Option[T]] =
+    opt match
+      case Some(x) => '{ Some[T]( ${Expr(x)} ) }
+      case None => '{ None }
```
+The `ToExpr` for primitive types must be implemented as primitive operations in the system.
+In our case, we use the reflection API to implement them. -They can be used as follows: +#### Extracting values from quotes +To be able to generate optimized code using the method `unrolledPowerCode`, the macro implementation `powerCode` needs to first +determine whether the argument passed as parameter `n` is a known constant value. +This can be achieved via _unlifting_ using the `Expr.unapply` extractor from our library implementation, which will only match if `n` is a quoted constant and extracts its value. ```scala -val f1: Expr[Int => String] = - to((x: Expr[Int]) => '{ $x.toString }) // '{ (x: Int) => x.toString } - -val f2: Expr[Int] => Expr[String] = - from('{ (x: Int) => x.toString }) // (x: Expr[Int]) => '{ ((x: Int) => x.toString)($x) } -f2('{2}) // '{ ((x: Int) => x.toString)(2) } +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + n match + case Expr(m) => // it is a constant: unlift code n='{m} into number m + unrolledPowerCode(x, m) + case _ => // not known: call power at run-time + '{ power($x, $n) } ``` -One limitation of `from` is that it does not β-reduce when a lambda is called immediately, as evidenced in the code `{ ((x: Int) => x.toString)(2) }`. -In some cases we want to remove the lambda from the code, for this we provide the method `Expr.betaReduce` that turns a tree -describing a function into a function mapping trees to trees. - +Alternatively, the `n.value` method can be used to get an `Option[Int]` with the value or `n.valueOrAbort` to get the value directly. ```scala -object Expr: - ... - def betaReduce[...](...)(...): ... = ... 
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + // emits an error message if `n` is not a constant + unrolledPowerCode(x, n.valueOrAbort) ``` -The definition of `Expr.betaReduce(f)(x)` is assumed to be functionally the same as -`'{($f)($x)}`, however it should optimize this call by returning the -result of beta-reducing `f(x)` if `f` is a known lambda expression. -`Expr.betaReduce` distributes applications of `Expr` over function arrows: +`Expr.unapply` and all variants of `value` are polymorphic and user-extensible via a given `FromExpr` type class. ```scala -Expr.betaReduce(_): Expr[(T1, ..., Tn) => R] => ((Expr[T1], ..., Expr[Tn]) => Expr[R]) +trait FromExpr[T]: + def unapply(x: Expr[T])(using Quotes): Option[T] ``` -## Lifting Types - -Types are not directly affected by the phase consistency principle. -It is possible to use types defined at any level in any other level. -But, if a type is used in a subsequent stage it will need to be lifted to a `Type`. -Indeed, the definition of `to` above uses `T` in the next stage, there is a -quote but no splice between the parameter binding of `T` and its -usage. But the code can be rewritten by adding an explicit binding of a `Type[T]`: +We can use `given` definitions to implement the `FromExpr` as we did for `ToExpr`. +The `FromExpr` for primitive types must be implemented as primitive operations in the system. +In our case, we use the reflection API to implement them. +To implement `FromExpr` for non-primitive types we use quote pattern matching (for example `OptionFromExpr`). -```scala -def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T])(using Type[R], Quotes): Expr[T => R] = - '{ (x: t.Underlying) => ${ f('x) } } -``` -In this version of `to`, the type of `x` is now the result of -inserting the type `Type[T]` and selecting its `Underlying`. 
+## Macros and Multi-Stage Programming
-To avoid clutter, the compiler converts any type reference to
-a type `T` in subsequent phases to `summon[Type[T]].Underlying`.
+The system supports multi-stage macros and run-time multi-stage programming using the same quotation abstractions.
-And to avoid duplication it does it once per type, and creates
-an alias for that type at the start of the quote.
+### Multi-Stage Macros
-For instance, the user-level definition of `to`:
+#### Macros
+We can generalize the splicing abstraction to express macros.
+A macro consists of a top-level splice that is not nested in any quote.
+Conceptually, the contents of the splice are evaluated one stage earlier than the program.
+In other words, the contents are evaluated while compiling the program. The generated code resulting from the macro replaces the splice in the program.
```scala
-def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] =
-  '{ (x: T) => ${ f('x) } }
+def power2(x: Double): Double =
+  ${ unrolledPowerCode('x, 2) } // x * x
```
-would be rewritten to
+#### Inline macros
+Since using the splices in the middle of a program is not as ergonomic as calling a function, we hide the staging mechanism from end-users of macros. We have a uniform way of calling macros and normal functions.
+For this, _we restrict the use of top-level splices to only appear in inline methods_[^1][^2].
```scala -def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = - '{ - type T = t.Underlying - (x: T) => ${ f('x) } - } +// inline macro definition +inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + +// user code +def power2(x: Double): Double = + powerMacro(x, 2) // x * x ``` -The `summon` query succeeds because there is a given instance of -type `Type[T]` available (namely the given parameter corresponding -to the context bound `: Type`), and the reference to that value is -phase-correct. If that was not the case, the phase inconsistency for -`T` would be reported as an error. +The evaluation of the macro will only happen when the code is inlined into `power2`. +When inlined, the code is equivalent to the previous definition of `power2`. +A consequence of using inline methods is that none of the arguments nor the return type of the macro will have to mention the `Expr` types; this hides all aspects of metaprogramming from the end-users. -## Lifting Expressions +#### Avoiding a complete interpreter +When evaluating a top-level splice, the compiler needs to interpret the code that is within the splice. +Providing an interpreter for the entire language is quite tricky, and it is even more challenging to make that interpreter run efficiently. +To avoid needing a complete interpreter, we can impose the following restrictions on splices to simplify the evaluation of the code in top-level splices. + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are literal constants, quoted expressions (parameters), calls to `Type.of` for type parameters and a reference to `Quotes`. -Consider the following implementation of a staged interpreter that implements -a compiler through staging. +In particular, these restrictions disallow the use of splices in top-level splices. 
+Such a splice would require several stages of interpretation which would be unnecessarily inefficient. +#### Compilation stages +The macro implementation (i.e., the method called in the top-level splice) can come from any pre-compiled library. +This provides a clear difference between the stages of the compilation process. +Consider the following 3 source files defined in distinct libraries. ```scala -import scala.quoted.* - -enum Exp: - case Num(n: Int) - case Plus(e1: Exp, e2: Exp) - case Var(x: String) - case Let(x: String, e: Exp, in: Exp) - -import Exp.* +// Macro.scala +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = ... +inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } ``` -The interpreted language consists of numbers `Num`, addition `Plus`, and variables -`Var` which are bound by `Let`. Here are two sample expressions in the language: - ```scala -val exp = Plus(Plus(Num(2), Var("x")), Num(4)) -val letExp = Let("x", Num(3), exp) +// Lib.scala (depends on Macro.scala) +def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) ``` -Here’s a compiler that maps an expression given in the interpreted -language to quoted Scala code of type `Expr[Int]`. -The compiler takes an environment that maps variable names to Scala `Expr`s. - ```scala -import scala.quoted.* - -def compile(e: Exp, env: Map[String, Expr[Int]])(using Quotes): Expr[Int] = - e match - case Num(n) => - Expr(n) - case Plus(e1, e2) => - '{ ${ compile(e1, env) } + ${ compile(e2, env) } } - case Var(x) => - env(x) - case Let(x, e, body) => - '{ val y = ${ compile(e, env) }; ${ compile(body, env + (x -> 'y)) } } +// App.scala (depends on Lib.scala) +@main def app() = power2(3.14) ``` - -Running `compile(letExp, Map())` would yield the following Scala code: +One way to syntactically visualize this is to put the application in a quote that delays the compilation of the application. 
+Then the application dependencies can be placed in an outer quote that contains the quoted application, and we repeat this recursively for dependencies of dependencies. ```scala -'{ val y = 3; (2 + y) + 4 } +'{ // macro library (compilation stage 1) + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + ... + inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + '{ // library using macros (compilation stage 2) + def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) + '{ power2(3.14) /* app (compilation stage 3) */ } + } +} ``` -The body of the first clause, `case Num(n) => Expr(n)`, looks suspicious. `n` -is declared as an `Int`, yet it is converted to an `Expr[Int]` with `Expr()`. -Shouldn’t `n` be quoted? In fact this would not -work since replacing `n` by `'n` in the clause would not be phase -correct. +To make the system more versatile, we allow calling macros in the project where it is defined, with some restrictions. +For example, to compile `Macro.scala` and `Lib.scala` together in the same library. +To this end, we do not follow the simpler syntactic model and rely on semantic information from the source files. +When compiling a source, if we detect a call to a macro that is not compiled yet, we delay the compilation of this source to the following compilation stage. +In the example, we would delay the compilation of `Lib.scala` because it contains a compile-time call to `powerCode`. +Compilation stages are repeated until all sources are compiled, or no progress can be made. +If no progress is made, there was a cyclic dependency between the definition and the use of the macro. +We also need to detect if at runtime the macro depends on sources that have not been compiled yet. +These are detected by executing the macro and checking for JVM linking errors to classes that have not been compiled yet. 
-The `Expr.apply` method is defined in package `quoted`:
+### Run-Time Multi-Stage Programming
-```scala
-package quoted
+See [Run-Time Multi-Stage Programming](./staging.md)
-object Expr:
-  ...
-  def apply[T: ToExpr](x: T)(using Quotes): Expr[T] =
-    summon[ToExpr[T]].toExpr(x)
-```
+## Safety
-This method says that values of types implementing the `ToExpr` type class can be
-converted to `Expr` values using `Expr.apply`.
+Multi-stage programming is by design statically safe and cross-stage safe.
-Scala 3 comes with given instances of `ToExpr` for
-several types including `Boolean`, `String`, and all primitive number
-types. For example, `Int` values can be converted to `Expr[Int]`
-values by wrapping the value in a `Literal` tree node. This makes use
-of the underlying tree representation in the compiler for
-efficiency. But the `ToExpr` instances are nevertheless not _magic_
-in the sense that they could all be defined in a user program without
-knowing anything about the representation of `Expr` trees. For
-instance, here is a possible instance of `ToExpr[Boolean]`:
+### Static Safety
-```scala
-given ToExpr[Boolean] with
-  def toExpr(b: Boolean) =
-    if b then '{ true } else '{ false }
-```
+#### Hygiene
+All identifier names are interpreted as symbolic references to the corresponding variable in the context of the quote.
+Therefore, while evaluating the quote, it is not possible to accidentally rebind a reference to a new variable with the same textual name.
-Once we can lift bits, we can work our way up. For instance, here is a
-possible implementation of `ToExpr[Int]` that does not use the underlying
-tree machinery:
+#### Well-typed
+If a quote is well typed, then the generated code is well typed.
+This is a simple consequence of tracking the type of each expression.
+An `Expr[T]` can only be created from a quote that contains an expression of type `T`.
+Conversely, an `Expr[T]` can only be spliced in a location that expects a type `T`.
+As mentioned before, `Expr` is covariant in its type parameter.
+This means that an `Expr[T]` can contain an expression of a subtype of `T`.
+When spliced in a location that expects a type `T`, these expressions also have a valid type.
-```scala
-given ToExpr[Int] with
-  def toExpr(n: Int) = n match
-    case Int.MinValue => '{ Int.MinValue }
-    case _ if n < 0 => '{ - ${ toExpr(-n) } }
-    case 0 => '{ 0 }
-    case _ if n % 2 == 0 => '{ ${ toExpr(n / 2) } * 2 }
-    case _ => '{ ${ toExpr(n / 2) } * 2 + 1 }
-```
+### Cross-Stage Safety
+
+#### Level consistency
+We define the _staging level_ of some code as the number of quotes minus the number of splices surrounding said code.
+Local variables must be defined and used in the same staging level.
-Since `ToExpr` is a type class, its instances can be conditional. For example,
-a `List` is liftable if its element type is:
+It is never possible to access a local variable from a lower staging level as it does not yet exist.
```scala
-given [T: ToExpr : Type]: ToExpr[List[T]] with
-  def toExpr(xs: List[T]) = xs match
-    case head :: tail => '{ ${ Expr(head) } :: ${ toExpr(tail) } }
-    case Nil => '{ Nil: List[T] }
+def badPower(x: Double, n: Int): Double =
+  ${ unrolledPowerCode('x, n) } // error: value of `n` not known yet
```
-In the end, `ToExpr` resembles very much a serialization
-framework. Like the latter it can be derived systematically for all
-collections, case classes and enums. Note also that the synthesis
-of _type-tag_ values of type `Type[T]` is essentially the type-level
-analogue of lifting.
-Using lifting, we can now give the missing definition of `showExpr` in the introductory example:
+In the context of macros and _cross-platform portability_, that is,
+macros compiled on one machine but potentially executed on another,
+we cannot support cross-stage persistence of local variables.
+Therefore, local variables can only be accessed at precisely the same staging level in our system.
```scala
-def showExpr[T](expr: Expr[T])(using Quotes): Expr[String] =
-  val code: String = expr.show
-  Expr(code)
+def badPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  // error: `n` potentially not available in the next execution environment
+  '{ power($x, n) }
```
-That is, the `showExpr` method converts its `Expr` argument to a string (`code`), and lifts
-the result back to an `Expr[String]` using `Expr.apply`.
-## Lifting Types
+The rules are slightly different for global definitions, such as `unrolledPowerCode`.
+It is possible to generate code that contains a reference to a _global_ definition such as in `'{ power(2, 4) }`.
+This is a limited form of cross-stage persistence that does not impede cross-platform portability, where we refer to the already compiled code for `power`.
+Each compilation step will lower the staging level by one while keeping global definitions.
+In consequence, we can refer to compiled definitions in macros such as `unrolledPowerCode` in `${ unrolledPowerCode('x, 2) }`.
-The previous section has shown that the metaprogramming framework has
-to be able to take a type `T` and convert it to a type tree of type
-`Type[T]` that can be reified. This means that all free variables of
-the type tree refer to types and values defined in the current stage.
+We can summarize level consistency in two rules:
+ * Local variables can be used only at the same staging level as their definition
+ * Global variables can be used at any staging level
-For a reference to a global class, this is easy: Just issue the fully
-qualified name of the class. Members of reifiable types are handled by
-just reifying the containing type together with the member name. But
-what to do for references to type parameters or local type definitions
-that are not defined in the current stage? Here, we cannot construct
-the `Type[T]` tree directly, so we need to get it from a recursive
-implicit search.
For instance, to implement +#### Type consistency +As Scala uses type erasure, generic types will be erased at run-time and hence in any following stage. +To ensure any quoted expression that refers to a generic type `T` does not lose the information it needs, we require a given `Type[T]` in scope. +The `Type[T]` will carry over the non-erased representation of the type into the next phase. +Therefore any generic type used at a higher staging level than its definition will require its `Type`. + +#### Scope extrusion +Within the contents of a splice, it is possible to have a quote that refers to a local variable defined in the outer quote. +If this quote is used within the splice, the variable will be in scope. +However, if the quote is somehow _extruded_ outside the splice, then variables might not be in scope anymore. +Quoted expressions can be extruded using side effects such as mutable state and exceptions. +The following example shows how a quote can be extruded using mutable state. ```scala -summon[Type[List[T]]] +var x: Expr[T] = null +'{ (y: T) => ${ x = 'y; 1 } } +x // has value '{y} but y is not in scope ``` -where `T` is not defined in the current stage, we construct the type constructor -of `List` applied to the splice of the result of searching for a given instance for `Type[T]`: +A second way a variable can be extruded is through the `run` method. +If `run` consumes a quoted variable reference, it will not be in scope anymore. +The result will reference a variable that is defined in the next stage. ```scala -Type.of[ List[ summon[Type[T]].Underlying ] ] +'{ (x: Int) => ${ run('x); ... } } +// evaluates to: '{ (x: Int) => ${ x; ... } 1 ``` -This is exactly the algorithm that Scala 2 uses to search for type tags. -In fact Scala 2's type tag feature can be understood as a more ad-hoc version of -`quoted.Type`. As was the case for type tags, the implicit search for a `quoted.Type` -is handled by the compiler, using the algorithm sketched above. 
+To catch both scope extrusion scenarios, our system restricts the use of quotes by only allowing a quote to be spliced if it was not extruded from a splice scope. +Unlike level consistency, this is checked at run-time[^4] rather than compile-time to avoid making the static type system too complicated. -## Relationship with `inline` +Each `Quotes` instance contains a unique scope identifier and refers to its parent scope, forming a stack of identifiers. +The parent of the scope of a `Quotes` is the scope of the `Quotes` used to create the enclosing quote. +Top-level splices and `run` create new scope stacks. +Every `Expr` knows in which scope it was created. +When it is spliced, we check that the quote scope is either the same as the splice scope, or a parent scope thereof. -Seen by itself, principled metaprogramming looks more like a framework for -runtime metaprogramming than one for compile-time metaprogramming with macros. -But combined with Scala 3’s `inline` feature it can be turned into a compile-time -system. The idea is that macro elaboration can be understood as a combination of -a macro library and a quoted program. For instance, here’s the `assert` macro -again together with a program that calls `assert`. -```scala -object Macros: +## Staged Lambdas - inline def assert(inline expr: Boolean): Unit = - ${ assertImpl('expr) } +When staging programs in a functional language there are two fundamental abstractions: a staged lambda `Expr[T => U]` and a staging lambda `Expr[T] => Expr[U]`. +The first is a function that will exist in the next stage, whereas the second is a function that exists in the current stage. +It is often convenient to have a mechanism to go from `Expr[T => U]` to `Expr[T] => Expr[U]` and vice versa. 
- def assertImpl(expr: Expr[Boolean])(using Quotes) = - val failMsg: Expr[String] = Expr("failed assertion: " + expr.show) - '{ if !($expr) then throw new AssertionError($failMsg) } +```scala +def later[T: Type, U: Type](f: Expr[T] => Expr[U]): Expr[T => U] = + '{ (x: T) => ${ f('x) } } -@main def program = - val x = 1 - Macros.assert(x != 0) +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => '{ $f($x) } ``` -Inlining the `assert` function would give the following program: +Both conversions can be performed out of the box with quotes and splices. +But if `f` is a known lambda function, `'{ $f($x) }` will not beta-reduce the lambda in place. +This optimization is performed in a later phase of the compiler. +Not reducing the application immediately can simplify analysis of generated code. +Nevertheless, it is possible to beta-reduce the lambda in place using the `Expr.betaReduce` method. ```scala -@main def program = - val x = 1 - ${ Macros.assertImpl('{ x != 0}) } +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => Expr.betaReduce('{ $f($x) }) ``` -The example is only phase correct because `Macros` is a global value and -as such not subject to phase consistency checking. Conceptually that’s -a bit unsatisfactory. If the PCP is so fundamental, it should be -applicable without the global value exception. But in the example as -given this does not hold since both `assert` and `program` call -`assertImpl` with a splice but no quote. +The `betaReduce` method will beta-reduce the outermost application of the expression if possible (regardless of arity). +If it is not possible to beta-reduce the expression, then it will return the original expression. -However, one could argue that the example is really missing -an important aspect: The macro library has to be compiled in a phase -prior to the program using it, but in the code above, macro -and program are defined together. 
A more accurate view of -macros would be to have the user program be in a phase after the macro -definitions, reflecting the fact that macros have to be defined and -compiled before they are used. Hence, conceptually the program part -should be treated by the compiler as if it was quoted: +## Staged Constructors +To create new class instances in a later stage, we can create them using factory methods (usually `apply` methods of an `object`), or we can instantiate them with a `new`. +For example, we can write `Some(1)` or `new Some(1)`, creating the same value. +In Scala 3, using the factory method call notation will fall back to a `new` if no `apply` method is found. +We follow the usual staging rules when calling a factory method. +Similarly, when we use a `new C`, the constructor of `C` is implicitly called, which also follows the usual staging rules. +Therefore for an arbitrary known class `C`, we can use both `'{ C(...) }` or `'{ new C(...) }` as constructors. +## Staged Classes +Quoted code can contain any valid expression including local class definitions. +This allows the creation of new classes with specialized implementations. +For example, we can implement a new version of `Runnable` that will perform some optimized operation. ```scala -@main def program = '{ - val x = 1 - ${ Macros.assertImpl('{ x != 0 }) } +def mkRunnable(x: Int)(using Quotes): Expr[Runnable] = '{ + class MyRunnable extends Runnable: + def run(): Unit = ... // generate some custom code that uses `x` + new MyRunnable } ``` -If `program` is treated as a quoted expression, the call to -`Macro.assertImpl` becomes phase correct even if macro library and -program are conceptualized as local definitions. +The quoted class is a local class and its type cannot escape the enclosing quote. +The class must be used inside the quote or an instance of it can be returned using a known interface (`Runnable` in this case). -But what about the call from `assert` to `assertImpl`? 
Here, we need a -tweak of the typing rules. An inline function such as `assert` that -contains a splice operation outside an enclosing quote is called a -_macro_. Macros are supposed to be expanded in a subsequent phase, -i.e. in a quoted context. Therefore, they are also type checked as if -they were in a quoted context. For instance, the definition of -`assert` is typechecked as if it appeared inside quotes. This makes -the call from `assert` to `assertImpl` phase-correct, even if we -assume that both definitions are local. +## Quote Pattern Matching -The `inline` modifier is used to declare a `val` that is -either a constant or is a parameter that will be a constant when instantiated. This -aspect is also important for macro expansion. - -To get values out of expressions containing constants `Expr` provides the method -`value` (or `valueOrError`). This will convert the `Expr[T]` into a `Some[T]` (or `T`) when the -expression contains value. Otherwise it will return `None` (or emit an error). -To avoid having incidental val bindings generated by the inlining of the `def` -it is recommended to use an inline parameter. To illustrate this, consider an -implementation of the `power` function that makes use of a statically known exponent: +It is sometimes necessary to analyze the structure of the code or decompose the code into its sub-expressions. +A classic example is an embedded DSL, where a macro knows a set of definitions that it can reinterpret while compiling the code (for instance, to perform optimizations). +In the following example, we extend our previous implementation of `powCode` to look into `x` to perform further optimizations. 
```scala -inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } - -private def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = - n.value match - case Some(m) => powerCode(x, m) - case None => '{ Math.pow($x, $n.toDouble) } - -private def powerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = - if n == 0 then '{ 1.0 } - else if n == 1 then x - else if n % 2 == 0 then '{ val y = $x * $x; ${ powerCode('y, n / 2) } } - else '{ $x * ${ powerCode(x, n - 1) } } +def fusedPowCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + x match + case '{ power($y, $m) } => // we have (y^m)^n + fusedPowCode(y, '{ $n * $m }) // generate code for y^(n*m) + case _ => + '{ power($x, $n) } ``` -## Scope Extrusion - -Quotes and splices are duals as far as the PCP is concerned. But there is an -additional restriction that needs to be imposed on splices to guarantee -soundness: code in splices must be free of side effects. The restriction -prevents code like this: - -```scala -var x: Expr[T] = ... -'{ (y: T) => ${ x = 'y; 1 } } -``` -This code, if it was accepted, would _extrude_ a reference to a quoted variable -`y` from its scope. This would subsequently allow access to a variable outside the -scope where it is defined, which is likely problematic. The code is clearly -phase consistent, so we cannot use PCP to rule it out. Instead, we postulate a -future effect system that can guarantee that splices are pure. In the absence of -such a system we simply demand that spliced expressions are pure by convention, -and allow for undefined compiler behavior if they are not. This is analogous to -the status of pattern guards in Scala, which are also required, but not -verified, to be pure. +#### Sub-patterns -[Multi-Stage Programming](./staging.md) introduces one additional method where -you can expand code at runtime with a method `run`. There is also a problem with -that invocation of `run` in splices. 
Consider the following expression: +In quoted patterns, the `$` binds the sub-expression to an expression `Expr` that can be used in that `case` branch. +The contents of `${..}` in a quote pattern are regular Scala patterns. +For example, we can use the `Expr(_)` pattern within the `${..}` to only match if it is a known value and extract it. ```scala -'{ (x: Int) => ${ run('x); 1 } } +def fusedUnrolledPowCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + x match + case '{ power($y, ${Expr(m)}) } => // we have (y^m)^n + fusedUnrolledPowCode(y, n * m) // generate code for y * ... * y + case _ => // ( n*m times ) + unrolledPowerCode(x, n) ``` -This is again phase correct, but will lead us into trouble. Indeed, evaluating -the splice will reduce the expression `run('x)` to `x`. But then the result +These value extraction sub-patterns can be polymorphic using an instance of `FromExpr`. +In the following example, we show the implementation of `OptionFromExpr` which internally uses the `FromExpr[T]` to extract the value using the `Expr(x)` pattern. ```scala -'{ (x: Int) => ${ x; 1 } } +given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with + def unapply(x: Expr[Option[T]])(using Quotes): Option[Option[T]] = + x match + case '{ Some( ${Expr(x)} ) } => Some(Some(x)) + case '{ None } => Some(None) + case _ => None ``` -is no longer phase correct. To prevent this soundness hole it seems easiest to -classify `run` as a side-effecting operation. It would thus be prevented from -appearing in splices. In a base language with side effects we would have to do this -anyway: Since `run` runs arbitrary code it can always produce a side effect if -the code it runs produces one. -## Example Expansion -Assume we have two methods, one `map` that takes an `Expr[Array[T]]` and a -function `f` and one `sum` that performs a sum by delegating to `map`. 
+#### Closed patterns +Patterns may contain two kinds of references: global references such as the call to the `power` method in `'{ power(...) }`, or references to bindings defined in the pattern such as `x` in `case '{ (x: Int) => x }`. +When extracting an expression from a quote, we need to ensure that we do not extrude any variable from the scope where it is defined. ```scala -object Macros: - - def map[T](arr: Expr[Array[T]], f: Expr[T] => Expr[Unit]) - (using Type[T], Quotes): Expr[Unit] = '{ - var i: Int = 0 - while i < ($arr).length do - val element: T = ($arr)(i) - ${f('element)} - i += 1 - } - - def sum(arr: Expr[Array[Int]])(using Quotes): Expr[Int] = '{ - var sum = 0 - ${ map(arr, x => '{sum += $x}) } - sum - } - - inline def sum_m(arr: Array[Int]): Int = ${sum('arr)} - -end Macros +'{ (x: Int) => x + 1 } match + case '{ (y: Int) => $z } => + // should not match, otherwise: z = '{ x + 1 } ``` -A call to `sum_m(Array(1,2,3))` will first inline `sum_m`: +In this example, we see that the pattern should not match. +Otherwise, any use of the expression `z` would contain an unbound reference to `x`. +To avoid any such extrusion, we only match on a `${..}` if its expression is closed under the definitions within the pattern. +Therefore, the pattern will not match if the expression is not closed. + +#### HOAS patterns +To allow extracting expressions that may contain extruded references we offer a _higher-order abstract syntax_ (HOAS) pattern `$f(y)` (or `$f(y1,...,yn)`). +This pattern will eta-expand the sub-expression with respect to `y` and bind it to `f`. +The lambda arguments will replace the variables that might have been extruded. 
```scala -val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) -${_root_.Macros.sum('arr)} +'{ ((x: Int) => x + 1).apply(2) } match + case '{ ((y: Int) => $f(y)).apply($z: Int) } => + // f may contain references to `x` (replaced by `$y`) + // f = (y: Expr[Int]) => '{ $y + 1 } + f(z) // generates '{ 2 + 1 } ``` -then it will splice `sum`: -```scala -val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) +A HOAS pattern `$x(y1,...,yn)` will only match the expression if it does not contain references to variables defined in the pattern that are not in the set `y1,...,yn`. +In other words, the pattern will match if the expression only contains references to variables defined in the pattern that are in `y1,...,yn`. +Note that the HOAS patterns `$x()` are semantically equivalent to closed patterns `$x`. -var sum = 0 -${ map('arr, x => '{sum += $x}) } -sum -``` -then it will inline `map`: +#### Type variables +Expressions may contain types that are not statically known. +For example, an `Expr[List[Int]]` may contain `list.map(_.toInt)` where `list` is a `List` of some type. +To cover all the possible cases we would need to explicitly match `list` on all possible types (`List[Int]`, `List[Int => Int]`, ...). +This is an infinite set of types and therefore pattern cases. +Even if we would know all possible types that a specific program could use, we may still end up with an unmanageable number of cases. +To overcome this, we introduce type variables in quoted patterns, which will match any type. + +In the following example, we show how type variables `t` and `u` match all possible pairs of consecutive calls to `map` on lists. +In the quoted patterns, types named with lower cases are identified as type variables. +This follows the same notation as type variables used in normal patterns. 
```scala -val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) +def fuseMapCode(x: Expr[List[Int]]): Expr[List[Int]] = + x match + case '{ ($ls: List[t]).map[u]($f).map[Int]($g) } => + '{ $ls.map($g.compose($f)) } + ... -var sum = 0 -val f = x => '{sum += $x} -${ _root_.Macros.map('arr, 'f)(Type.of[Int])} -sum +fuseMapCode('{ List(1.2).map(f).map(g) }) // '{ List(1.2).map(g.compose(f)) } +fuseMapCode('{ List('a').map(h).map(i) }) // '{ List('a').map(i.compose(h)) } ``` +Variables `f` and `g` are inferred to be of type `Expr[t => u]` and `Expr[u => Int]` respectively. +Subsequently, we can infer `$g.compose($f)` to be of type `Expr[t => Int]` which is the type of the argument of `$ls.map(..)`. -then it will expand and splice inside quotes `map`: +Type variables are abstract types that will be erased; this implies that to reference them in the second quote we need a given `Type[t]` and `Type[u]`. +The quoted pattern will implicitly provide those given types. +At run-time, when the pattern matches, the type of `t` and `u` will be known, and the `Type[t]` and `Type[u]` will contain the precise types in the expression. +As `Expr` is covariant, the statically known type of the expression might not be the actual type. +Type variables can also be used to recover the precise type of the expression. ```scala -val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) +def let(x: Expr[Any])(using Quotes): Expr[Any] = + x match + case '{ $x: t } => + '{ val y: t = $x; y } -var sum = 0 -val f = x => '{sum += $x} -var i: Int = 0 -while i < arr.length do - val element: Int = (arr)(i) - sum += element - i += 1 -sum +let('{1}) // will return a `Expr[Any]` that contains an `Expr[Int]]` ``` -Finally cleanups and dead code elimination: - +While we can define the type variable in the middle of the pattern, their normal form is to define them as a `type` with a lower case name at the start of the pattern. 
+We use the Scala backquote `` `t` `` naming convention which interprets the string within the backquote as a literal name identifier. +This is typically used when we have names that contain special characters that are not allowed for normal Scala identifiers. +But we use it to explicitly state that this is a reference to that name and not the introduction of a new variable. ```scala -val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) -var sum = 0 -var i: Int = 0 -while i < arr.length do - val element: Int = arr(i) - sum += element - i += 1 -sum + case '{ type t; $x: `t` } => ``` - -## Find implicits within a macro - -Similarly to the `summonFrom` construct, it is possible to make implicit search available -in a quote context. For this we simply provide `scala.quoted.Expr.summon`: +This is a bit more verbose but has some expressivity advantages such as allowing to define bounds on the variables and be able to refer to them several times in any scope of the pattern. ```scala -import scala.collection.immutable.{ TreeSet, HashSet } -inline def setFor[T]: Set[T] = ${ setForExpr[T] } - -def setForExpr[T: Type](using Quotes): Expr[Set[T]] = - Expr.summon[Ordering[T]] match - case Some(ord) => '{ new TreeSet[T]()($ord) } - case _ => '{ new HashSet[T] } + case '{ type t >: List[Int] <: Seq[Int]; $x: `t` } => + case '{ type t; $x: (`t`, `t`) } => ``` -## Relationship with Transparent Inline -[Inline](./inline.md) documents inlining. The code below introduces a transparent -inline method that can calculate either a value of type `Int` or a value of type -`String`. +#### Type patterns +It is possible to only have a type and no expression of that type. +To be able to inspect a type, we introduce quoted type pattern `case '[..] =>`. +It works the same way as a quoted pattern but is restricted to contain a type. +Type variables can be used in quoted type patterns to extract a type. 
```scala -transparent inline def defaultOf(inline str: String) = - ${ defaultOfImpl('str) } - -def defaultOfImpl(strExpr: Expr[String])(using Quotes): Expr[Any] = - strExpr.valueOrError match - case "int" => '{1} - case "string" => '{"a"} - -// in a separate file -val a: Int = defaultOf("int") -val b: String = defaultOf("string") - +def empty[T: Type]: Expr[T] = + Type.of[T] match + case '[String] => '{ "" } + case '[List[t]] => '{ List.empty[t] } + ... ``` -## Defining a macro and using it in a single project - -It is possible to define macros and use them in the same project as long as the implementation -of the macros does not have run-time dependencies on code in the file where it is used. -It might still have compile-time dependencies on types and quoted code that refers to the use-site file. +`Type.of[T]` is used to summon the given instance of `Type[T]` in scope, it is equivalent to `summon[Type[T]]`. -To provide this functionality Scala 3 provides a transparent compilation mode where files that -try to expand a macro but fail because the macro has not been compiled yet are suspended. -If there are any suspended files when the compilation ends, the compiler will automatically restart -compilation of the suspended files using the output of the previous (partial) compilation as macro classpath. -In case all files are suspended due to cyclic dependencies the compilation will fail with an error. +#### Type testing and casting +It is important to note that instance checks and casts on `Expr`, such as `isInstanceOf[Expr[T]]` and `asInstanceOf[Expr[T]]`, will only check if the instance is of the class `Expr` but will not be able to check the `T` argument. +These cases will issue a warning at compile-time, but if they are ignored, they can result in unexpected behavior. -## Pattern matching on quoted expressions +These operations can be supported correctly in the system. 
+For a simple type test it is possible to use the `isExprOf[T]` method of `Expr` to check if it is an instance of that type. +Similarly, it is possible to use `asExprOf[T]` to cast an expression to a given type. +These operations use a given `Type[T]` to work around type erasure. -It is possible to deconstruct or extract values out of `Expr` using pattern matching. -`scala.quoted` contains objects that can help extracting values from `Expr`. +## Sub-Expression Transformation -- `scala.quoted.Expr`/`scala.quoted.Exprs`: matches an expression of a value (or list of values) and returns the value (or list of values). -- `scala.quoted.Const`/`scala.quoted.Consts`: Same as `Expr`/`Exprs` but only works on primitive values. -- `scala.quoted.Varargs`: matches an explicit sequence of expressions and returns them. These sequences are useful to get individual `Expr[T]` out of a varargs expression of type `Expr[Seq[T]]`. - -These could be used in the following way to optimize any call to `sum` that has statically known values. +The system provides a mechanism to transform all sub-expressions of an expression. +This is useful when the sub-expressions we want to transform are deep in the expression. +It is also necessary if the expression contains sub-expressions that cannot be matched using quoted patterns (such as local class definitions). 
```scala -inline def sum(inline args: Int*): Int = ${ sumExpr('args) } -private def sumExpr(argsExpr: Expr[Seq[Int]])(using Quotes): Expr[Int] = - argsExpr match - case Varargs(args @ Exprs(argValues)) => - // args is of type Seq[Expr[Int]] - // argValues is of type Seq[Int] - Expr(argValues.sum) // precompute result of sum - case Varargs(argExprs) => // argExprs is of type Seq[Expr[Int]] - val staticSum: Int = argExprs.map(_.value.getOrElse(0)).sum - val dynamicSum: Seq[Expr[Int]] = argExprs.filter(_.value.isEmpty) - dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg }) - case _ => - '{ $argsExpr.sum } +trait ExprMap: + def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] + def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = + ... ``` -### Quoted patterns - -Quoted pattens allow deconstructing complex code that contains a precise structure, types or methods. -Patterns `'{ ... }` can be placed in any location where Scala expects a pattern. - -For example - -```scala -optimize { - sum(sum(1, a, 2), 3, b) -} // should be optimized to 6 + a + b -``` +Users can extend the `ExprMap` trait and implement the `transform` method. +This interface is flexible and can implement top-down, bottom-up, or other transformations. ```scala -def sum(args: Int*): Int = args.sum -inline def optimize(inline arg: Int): Int = ${ optimizeExpr('arg) } -private def optimizeExpr(body: Expr[Int])(using Quotes): Expr[Int] = - body match - // Match a call to sum without any arguments - case '{ sum() } => Expr(0) - // Match a call to sum with an argument $n of type Int. - // n will be the Expr[Int] representing the argument. 
- case '{ sum($n) } => n - // Match a call to sum and extracts all its args in an `Expr[Seq[Int]]` - case '{ sum(${Varargs(args)}: _*) } => sumExpr(args) - case body => body - -private def sumExpr(args1: Seq[Expr[Int]])(using Quotes): Expr[Int] = - def flatSumArgs(arg: Expr[Int]): Seq[Expr[Int]] = arg match - case '{ sum(${Varargs(subArgs)}: _*) } => subArgs.flatMap(flatSumArgs) - case arg => Seq(arg) - val args2 = args1.flatMap(flatSumArgs) - val staticSum: Int = args2.map(_.value.getOrElse(0)).sum - val dynamicSum: Seq[Expr[Int]] = args2.filter(_.value.isEmpty) - dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg }) +object OptimizeIdentity extends ExprMap: + def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = + transformChildren(e) match // bottom-up transformation + case '{ identity($x) } => x + case _ => e ``` -### Recovering precise types using patterns +The `transformChildren` method is implemented as a primitive that knows how to reach all the direct sub-expressions and calls `transform` on each one. +The type passed to `transform` is the expected type of this sub-expression in its expression. +For example while transforming `Some(1)` in `'{ val x: Option[Int] = Some(1); ...}` the type will be `Option[Int]` and not `Some[Int]`. +This implies that we can safely transform `Some(1)` into `None`. -Sometimes it is necessary to get a more precise type for an expression. This can be achieved using the following pattern match. +## Staged Implicit Summoning +When summoning implicit arguments using `summon`, we will find the given instances in the current scope. +It is possible to use `summon` to get staged implicit arguments by explicitly staging them first. +In the following example, we can pass an implicit `Ordering[T]` in a macro as an `Expr[Ordering[T]]` to its implementation. +Then we can splice it and give it implicitly in the next stage. 
```scala -def f(expr: Expr[Any])(using Quotes) = expr match - case '{ $x: t } => - // If the pattern match succeeds, then there is - // some type `t` such that - // - `x` is bound to a variable of type `Expr[t]` - // - `t` is bound to a new type `t` and a given - // instance `Type[t]` is provided for it - // That is, we have `x: Expr[t]` and `given Type[t]`, - // for some (unknown) type `t`. -``` - -This might be used to then perform an implicit search as in: +inline def treeSetFor[T](using ord: Ordering[T]): Set[T] = + ${ setExpr[T](using 'ord) } -```scala -extension (inline sc: StringContext) - inline def showMe(inline args: Any*): String = ${ showMeExpr('sc, 'args) } - -private def showMeExpr(sc: Expr[StringContext], argsExpr: Expr[Seq[Any]])(using Quotes): Expr[String] = - import quotes.reflect.report - argsExpr match - case Varargs(argExprs) => - val argShowedExprs = argExprs.map { - case '{ $arg: tp } => - Expr.summon[Show[tp]] match - case Some(showExpr) => - '{ $showExpr.show($arg) } - case None => - report.error(s"could not find implicit for ${Type.show[Show[tp]]}", arg); '{???} - } - val newArgsExpr = Varargs(argShowedExprs) - '{ $sc.s($newArgsExpr: _*) } - case _ => - // `new StringContext(...).showMeExpr(args: _*)` not an explicit `showMeExpr"..."` - report.error(s"Args must be explicit", argsExpr) - '{???} - -trait Show[-T]: - def show(x: T): String - -// in a different file -given Show[Boolean] with - def show(b: Boolean) = "boolean!" - -println(showMe"${true}") +def setExpr[T:Type](using ord: Expr[Ordering[T]])(using Quotes): Expr[Set[T]] = + '{ given Ordering[T] = $ord; new TreeSet[T]() } ``` -### Open code patterns +We pass it as an implicit `Expr[Ordering[T]]` because there might be intermediate methods that can pass it along implicitly. -Quoted pattern matching also provides higher-order patterns to match open terms. If a quoted term contains a definition, -then the rest of the quote can refer to this definition. 
+An alternative is to summon implicit values in the scope where the macro is invoked. +Using the `Expr.summon` method we get an optional expression containing the implicit instance. +This provides the ability to search for implicit instances conditionally. ```scala -'{ - val x: Int = 4 - x * x -} +def summon[T: Type](using Quotes): Option[Expr[T]] ``` -To match such a term we need to match the definition and the rest of the code, but we need to explicitly state that the rest of the code may refer to this definition. - ```scala -case '{ val y: Int = $x; $body(y): Int } => -``` - -Here `$x` will match any closed expression while `$body(y)` will match an expression that is closed under `y`. Then -the subexpression of type `Expr[Int]` is bound to `body` as an `Expr[Int => Int]`. The extra argument represents the references to `y`. Usually this expression is used in combination with `Expr.betaReduce` to replace the extra argument. +inline def setFor[T]: Set[T] = + ${ setForExpr[T] } -```scala -inline def eval(inline e: Int): Int = ${ evalExpr('e) } - -private def evalExpr(e: Expr[Int])(using Quotes): Expr[Int] = e match - case '{ val y: Int = $x; $body(y): Int } => - // body: Expr[Int => Int] where the argument represents - // references to y - evalExpr(Expr.betaReduce('{$body(${evalExpr(x)})})) - case '{ ($x: Int) * ($y: Int) } => - (x.value, y.value) match - case (Some(a), Some(b)) => Expr(a * b) - case _ => e - case _ => e +def setForExpr[T: Type]()(using Quotes): Expr[Set[T]] = + Expr.summon[Ordering[T]] match + case Some(ord) => + '{ new TreeSet[T]()($ord) } + case _ => + '{ new HashSet[T] } ``` -```scala -eval { // expands to the code: (16: Int) - val x: Int = 4 - x * x -} -``` +## More details -We can also close over several bindings using `$b(a1, a2, ..., an)`. -To match an actual application we can use braces on the function part `${b}(a1, a2, ..., an)`. 
+* [Specification](./macros-spec.md) +* Scalable Metaprogramming in Scala 3[^1] -## More details -[More details](./macros-spec.md) +[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370) +[^2]: [Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) +[^3]: Implemented in the Scala 3 Dotty project https://github.com/lampepfl/dotty. sbt library dependency `"org.scala-lang" %% "scala3-staging" % scalaVersion.value` +[^4]: Using the `-Xcheck-macros` compiler flag diff --git a/docs/_docs/reference/metaprogramming/metaprogramming.md b/docs/_docs/reference/metaprogramming/metaprogramming.md index 3bce2d7c922e..af7206eff34e 100644 --- a/docs/_docs/reference/metaprogramming/metaprogramming.md +++ b/docs/_docs/reference/metaprogramming/metaprogramming.md @@ -39,7 +39,7 @@ introduce the following fundamental facilities: representation of code. They can be parameterized and composed using splices, but their structure cannot be analyzed from the outside. TASTy reflection gives a way to analyze code structure by partly revealing the representation type of a piece of code in a standard API. The representation - type is a form of typed abstract syntax tree, which gives rise to the `TASTy` + type is a form of **t**yped **a**bstract **s**yntax **t**ree, which gives rise to the `TASTy` moniker. 6. [TASTy Inspection](./tasty-inspect.md) Typed abstract syntax trees are serialized diff --git a/docs/_docs/reference/metaprogramming/reflection.md b/docs/_docs/reference/metaprogramming/reflection.md index b2d492657a4e..68cb7dafcfbb 100644 --- a/docs/_docs/reference/metaprogramming/reflection.md +++ b/docs/_docs/reference/metaprogramming/reflection.md @@ -98,10 +98,11 @@ def macroImpl()(quotes: Quotes): Expr[Unit] = `quotes.reflect` contains three facilities for tree traversal and transformation. -`TreeAccumulator` ties the knot of a traversal. 
By calling `foldOver(x, tree)(owner)` -we can dive into the `tree` node and start accumulating values of type `X` (e.g., -of type `List[Symbol]` if we want to collect symbols). The code below, for -example, collects the `val` definitions in the tree. +`TreeAccumulator[X]` allows you to traverse the tree and aggregate data of type `X` along the way, by overriding its method `foldTree(x: X, tree: Tree)(owner: Symbol): X`. + +`foldOverTree(x: X, tree: Tree)(owner: Symbol): X` calls `foldTree` on each children of `tree` (using `fold` to give each call the value of the previous one). + +The code below, for example, collects the `val` definitions in the tree. ```scala def collectPatternVariables(tree: Tree)(using ctx: Context): List[Symbol] = @@ -115,12 +116,15 @@ def collectPatternVariables(tree: Tree)(using ctx: Context): List[Symbol] = acc(Nil, tree) ``` -A `TreeTraverser` extends a `TreeAccumulator` and performs the same traversal -but without returning any value. Finally, a `TreeMap` performs a transformation. +A `TreeTraverser` extends a `TreeAccumulator[Unit]` and performs the same traversal +but without returning any value. + +`TreeMap` transforms trees along the traversal, through overloading its methods it is possible to transform only trees of specific types, for example `transformStatement` only transforms `Statement`s. + #### ValDef.let -`quotes.reflect.ValDef` also offers a method `let` that allows us to bind the `rhs` (right-hand side) to a `val` and use it in `body`. +The object `quotes.reflect.ValDef` also offers a method `let` that allows us to bind the `rhs` (right-hand side) to a `val` and use it in `body`. Additionally, `lets` binds the given `terms` to names and allows to use them in the `body`. 
Their type definitions are shown below: diff --git a/docs/_docs/reference/metaprogramming/simple-smp.md b/docs/_docs/reference/metaprogramming/simple-smp.md index 2ba0155ad329..61b062f55b87 100644 --- a/docs/_docs/reference/metaprogramming/simple-smp.md +++ b/docs/_docs/reference/metaprogramming/simple-smp.md @@ -23,7 +23,7 @@ replace evaluation contexts with contextual typing rules. While this is more verbose, it makes it easier to set up the meta theory. ## Syntax -``` +```ebnf Terms t ::= x variable (x: T) => t lambda t t application diff --git a/docs/_docs/reference/metaprogramming/staging.md b/docs/_docs/reference/metaprogramming/staging.md index e74d491402b5..1c154e09f50e 100644 --- a/docs/_docs/reference/metaprogramming/staging.md +++ b/docs/_docs/reference/metaprogramming/staging.md @@ -1,6 +1,6 @@ --- layout: doc-page -title: "Runtime Multi-Stage Programming" +title: "Run-Time Multi-Stage Programming" nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/staging.html --- @@ -60,7 +60,7 @@ impose the following restrictions on the use of splices. The framework as discussed so far allows code to be staged, i.e. be prepared to be executed at a later stage. To run that code, there is another method in class `Expr` called `run`. Note that `$` and `run` both map from `Expr[T]` -to `T` but only `$` is subject to the [PCP](./macros.md#the-phase-consistency-principle), whereas `run` is just a normal method. +to `T` but only `$` is subject to [Cross-Stage Safety](./macros.md#cross-stage-safety), whereas `run` is just a normal method. `scala.quoted.staging.run` provides a `Quotes` that can be used to show the expression in its scope. On the other hand `scala.quoted.staging.withQuotes` provides a `Quotes` without evaluating the expression. 
diff --git a/docs/_docs/reference/new-types/dependent-function-types-spec.md b/docs/_docs/reference/new-types/dependent-function-types-spec.md index f3237ddf7b9a..f603200b1ae0 100644 --- a/docs/_docs/reference/new-types/dependent-function-types-spec.md +++ b/docs/_docs/reference/new-types/dependent-function-types-spec.md @@ -8,7 +8,7 @@ Initial implementation in [PR #3464](https://github.com/lampepfl/dotty/pull/3464 ## Syntax -``` +```ebnf FunArgTypes ::= InfixType | ‘(’ [ FunArgType {',' FunArgType } ] ‘)’ | ‘(’ TypedFunParam {',' TypedFunParam } ‘)’ diff --git a/docs/_docs/reference/new-types/intersection-types-spec.md b/docs/_docs/reference/new-types/intersection-types-spec.md index 346c57c004f0..8d332fc6ed29 100644 --- a/docs/_docs/reference/new-types/intersection-types-spec.md +++ b/docs/_docs/reference/new-types/intersection-types-spec.md @@ -12,7 +12,7 @@ with the usual precedence and subject to usual resolving rules. Unless shadowed by another definition, it resolves to the type `scala.&`, which acts as a type alias to an internal representation of intersection types. -``` +```ebnf Type ::= ...| InfixType InfixType ::= RefinedType {id [nl] RefinedType} ``` diff --git a/docs/_docs/reference/new-types/match-types.md b/docs/_docs/reference/new-types/match-types.md index 9fcee09c34f5..d646dd11880b 100644 --- a/docs/_docs/reference/new-types/match-types.md +++ b/docs/_docs/reference/new-types/match-types.md @@ -83,6 +83,12 @@ following conditions are met: and these types are `=:=` to their corresponding type patterns in the match type +So you know, while the case body will be expected to have the type on the right-hand +side of the corresponding match type case, that doesn't imply the match type argument +is constrained. Using the example, the last case body must conform to X, but that +doesn't constrain X to be AnyVal, and therefore a LeafElem[X] inside the body wouldn't +reduce; it would remain stuck, and as such just an abstract type. 
+ ## Representation of Match Types The internal representation of a match type diff --git a/docs/_docs/reference/new-types/new-types.md b/docs/_docs/reference/new-types/new-types.md index 84c157495d6f..8eb1d7b3bd1b 100644 --- a/docs/_docs/reference/new-types/new-types.md +++ b/docs/_docs/reference/new-types/new-types.md @@ -1,7 +1,7 @@ --- layout: index title: "New Types" -nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/index.html +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/index.html --- This chapter documents the new types introduced in Scala 3. diff --git a/docs/_docs/reference/new-types/type-lambdas-spec.md b/docs/_docs/reference/new-types/type-lambdas-spec.md index 52f88dab4217..7f7053a13ddd 100644 --- a/docs/_docs/reference/new-types/type-lambdas-spec.md +++ b/docs/_docs/reference/new-types/type-lambdas-spec.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas-s ## Syntax -``` +```ebnf Type ::= ... | TypeParamClause ‘=>>’ Type TypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ TypeParam ::= {Annotation} (id [HkTypeParamClause] | ‘_’) TypeBounds @@ -103,9 +103,9 @@ type O2[X] = List[X] ``` would be treated as covariant, `X` is used covariantly on its right-hand side. -**Note**: The decision to treat `Nothing` as universal bottom type is provisional, and might be changed after further discussion. +**Note:** The decision to treat `Nothing` as universal bottom type is provisional, and might be changed after further discussion. -**Note**: Scala 2 and 3 differ in that Scala 2 also treats `Any` as universal top-type. This is not done in Scala 3. See also the discussion on [kind polymorphism](../other-new-features/kind-polymorphism.md) +**Note:** Scala 2 and 3 differ in that Scala 2 also treats `Any` as universal top-type. This is not done in Scala 3. 
See also the discussion on [kind polymorphism](../other-new-features/kind-polymorphism.md) ## Curried Type Parameters diff --git a/docs/_docs/reference/new-types/union-types-spec.md b/docs/_docs/reference/new-types/union-types-spec.md index d250d3f11713..1093631e7c63 100644 --- a/docs/_docs/reference/new-types/union-types-spec.md +++ b/docs/_docs/reference/new-types/union-types-spec.md @@ -72,6 +72,10 @@ a non-union type, for this purpose we define the _join_ of a union type `T1 | `T1`,...,`Tn`. Note that union types might still appear as type arguments in the resulting type, this guarantees that the join is always finite. +The _visible join_ of a union type is its join where all operands of the intersection that +are instances of [transparent](../other-new-features/transparent-traits.md) traits or classes are removed. + + ### Example Given @@ -80,31 +84,50 @@ Given trait C[+T] trait D trait E -class A extends C[A] with D -class B extends C[B] with D with E +transparent trait X +class A extends C[A], D, X +class B extends C[B], D, E, X ``` -The join of `A | B` is `C[A | B] & D` +The join of `A | B` is `C[A | B] & D & X` and the visible join of `A | B` is `C[A | B] & D`. + +## Hard and Soft Union Types + +We distinguish between hard and soft union types. A _hard_ union type is a union type that's explicitly +written in the source. For instance, in +```scala +val x: Int | String = ... +``` +`Int | String` would be a hard union type. A _soft_ union type is a type that arises from type checking +an alternative of expressions. For instance, the type of the expression +```scala +val x = 1 +val y = "abc" +if cond then x else y +``` +is the soft unon type `Int | String`. Similarly for match expressions. The type of +```scala +x match + case 1 => x + case 2 => "abc" + case 3 => List(1, 2, 3) +``` +is the soft union type `Int | "abc" | List[Int]`. 
+ ## Type inference When inferring the result type of a definition (`val`, `var`, or `def`) and the -type we are about to infer is a union type, then we replace it by its join. +type we are about to infer is a soft union type, then we replace it by its visible join, +provided it is not empty. Similarly, when instantiating a type argument, if the corresponding type parameter is not upper-bounded by a union type and the type we are about to -instantiate is a union type, we replace it by its join. This mirrors the +instantiate is a soft union type, we replace it by its visible join, provided it is not empty. +This mirrors the treatment of singleton types which are also widened to their underlying type unless explicitly specified. The motivation is the same: inferring types which are "too precise" can lead to unintuitive typechecking issues later on. -**Note:** Since this behavior limits the usability of union types, it might -be changed in the future. For example by not widening unions that have been -explicitly written down by the user and not inferred, or by not widening a type -argument when the corresponding type parameter is covariant. - -See [PR #2330](https://github.com/lampepfl/dotty/pull/2330) and -[Issue #4867](https://github.com/lampepfl/dotty/issues/4867) for further discussions. - ### Example ```scala diff --git a/docs/_docs/reference/new-types/union-types.md b/docs/_docs/reference/new-types/union-types.md index ebc4565e36fb..978e08649d9e 100644 --- a/docs/_docs/reference/new-types/union-types.md +++ b/docs/_docs/reference/new-types/union-types.md @@ -8,8 +8,9 @@ A union type `A | B` has as values all values of type `A` and also all values of ```scala -case class UserName(name: String) -case class Password(hash: Hash) +trait ID +case class UserName(name: String) extends ID +case class Password(hash: Hash) extends ID def help(id: UserName | Password) = val user = id match @@ -22,7 +23,10 @@ Union types are duals of intersection types. 
`|` is _commutative_: `A | B` is the same type as `B | A`. The compiler will assign a union type to an expression only if such a -type is explicitly given. This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: +type is explicitly given or if the common supertype of all alternatives is [transparent](../other-new-features/transparent-traits.md). + + +This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: ```scala scala> val password = Password(123) @@ -32,15 +36,36 @@ scala> val name = UserName("Eve") val name: UserName = UserName(Eve) scala> if true then name else password -val res2: Object = UserName(Eve) +val res1: ID = UserName(Eve) scala> val either: Password | UserName = if true then name else password -val either: Password | UserName = UserName(Eve) +val either: UserName | Password = UserName(Eve) ``` - -The type of `res2` is `Object & Product`, which is a supertype of -`UserName` and `Password`, but not the least supertype `Password | -UserName`. If we want the least supertype, we have to give it +The type of `res1` is `ID`, which is a supertype of +`UserName` and `Password`, but not the least supertype `UserName | Password`. +If we want the least supertype, we have to give it explicitly, as is done for the type of `either`. +The inference behavior changes if the common supertrait `ID` is declared `transparent`: +```scala +transparent trait ID +``` +In that case the union type is not widened. +```scala +scala> if true then name else password +val res2: UserName | Password = UserName(Eve) +``` +The more precise union type is also inferred if `UserName` and `Password` are declared without an explicit +parent, since in that case their implied superclass is `Object`, which is among the classes that are +assumed to be transparent. See [Transparent Traits and Classes](../other-new-features/transparent-traits.md) +for a list of such classes. 
+```scala +case class UserName(name: String) +case class Password(hash: Hash) + +scala> if true then UserName("Eve") else Password(123) +val res3: UserName | Password = UserName(Eve) +``` + + [More details](./union-types-spec.md) diff --git a/docs/_docs/reference/other-new-features/creator-applications.md b/docs/_docs/reference/other-new-features/creator-applications.md index 81f09d897955..8b1de02b2f25 100644 --- a/docs/_docs/reference/other-new-features/creator-applications.md +++ b/docs/_docs/reference/other-new-features/creator-applications.md @@ -47,8 +47,12 @@ be selected with `apply` (or be applied to arguments, in which case the `apply` inserted). Constructor proxies are also not allowed to shadow normal definitions. That is, -if an identifier resolves to a constructor proxy, and the same identifier is also -defined or imported in some other scope, an ambiguity is reported. +an ambiguity is reported, if + + - an identifier resolves to a constructor proxy, + - the same identifier is also defined or imported in some other scope, + - the other reference can be applied to a (possibly empty) parameter list. That + is, it refers either to a method or to a value containing an apply method as member. ## Motivation diff --git a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md index d110c8bc079b..88815ad1e136 100644 --- a/docs/_docs/reference/other-new-features/experimental-defs.md +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -216,6 +216,7 @@ Experimental definitions can only be referenced in an experimental scope. Experi
Example 1 + ```scala import scala.annotation.experimental @@ -241,6 +242,7 @@ Experimental definitions can only be referenced in an experimental scope. Experi } } ``` +
5. Annotations of an experimental definition are in experimental scopes. Examples: @@ -268,13 +270,6 @@ Can use the `-Yno-experimental` compiler flag to disable it and run as a proper In any other situation, a reference to an experimental definition will cause a compilation error. -## Experimental inheritance - -All subclasses of an experimental `class` or `trait` must be marked as [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) even if they are in an experimental scope. -Anonymous classes and SAMs of experimental classes are considered experimental. - -We require explicit annotations to make sure we do not have completion or cycles issues with nested classes. This restriction could be relaxed in the future. - ## Experimental overriding For an overriding member `M` and overridden member `O`, if `O` is non-experimental then `M` must be non-experimental. diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md index 40e2ad9df248..e8482cb343d9 100644 --- a/docs/_docs/reference/other-new-features/export.md +++ b/docs/_docs/reference/other-new-features/export.md @@ -176,7 +176,7 @@ extension (x: String) ## Syntax changes: -``` +```ebnf TemplateStat ::= ... | Export TopStat ::= ... @@ -201,16 +201,16 @@ Consider the following example: ```scala class B { val c: Int } -object a { val b = new B } -export a.* +object A { val b = new B } +export A.* export b.* ``` -Is the `export b.*` clause legal? If yes, what does it export? Is it equivalent to `export a.b.*`? What about if we swap the last two clauses? +Is the `export b.*` clause legal? If yes, what does it export? Is it equivalent to `export A.b.*`? What about if we swap the last two clauses? ``` export b.* -export a.* +export A.* ``` To avoid tricky questions like these, we fix the elaboration order of exports as follows. 
diff --git a/docs/_docs/reference/other-new-features/indentation.md b/docs/_docs/reference/other-new-features/indentation.md index e931030ab696..f53bf0995727 100644 --- a/docs/_docs/reference/other-new-features/indentation.md +++ b/docs/_docs/reference/other-new-features/indentation.md @@ -174,18 +174,72 @@ The syntax changes allowing this are as follows: Define for an arbitrary sequence of tokens or non-terminals `TS`: -``` +```ebnf :<<< TS >>> ::= ‘{’ TS ‘}’ | ``` Then the grammar changes as follows: -``` +```ebnf TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>> EnumBody ::= :<<< [SelfType] EnumStat {semi EnumStat} >>> Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> Packaging ::= ‘package’ QualId :<<< TopStats >>> ``` +## Optional Braces for Method Arguments + +Starting with Scala 3.3, a `` token is also recognized where a function argument would be expected. Examples: + +```scala +times(10): + println("ah") + println("ha") +``` + +or + +```scala +credentials `++`: + val file = Path.userHome / ".credentials" + if file.exists + then Seq(Credentials(file)) + else Seq() +``` + +or + +```scala +xs.map: + x => + val y = x - 1 + y * y +``` +What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this: + +```scala +xs.map: x => + val y = x - 1 + y * y +``` +and the following would also be legal: +```scala +xs.foldLeft(0): (x, y) => + x + y +``` + +The grammar changes for optional braces around arguments are as follows. + +```ebnf +SimpleExpr ::= ... + | SimpleExpr ColonArgument +InfixExpr ::= ... + | InfixExpr id ColonArgument +ColonArgument ::= colon [LambdaStart] + indent (CaseClauses | Block) outdent +LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + | HkTypeParamClause ‘=>’ +``` + ## Spaces vs Tabs Indentation prefixes can consist of spaces and/or tabs. 
Indentation widths are the indentation prefixes themselves, ordered by the string prefix relation. So, so for instance "2 tabs, followed by 4 spaces" is strictly less than "2 tabs, followed by 5 spaces", but "2 tabs, followed by 4 spaces" is incomparable to "6 tabs" or to "4 spaces, followed by 2 tabs". It is an error if the indentation width of some line is incomparable with the indentation width of the region that's current at that point. To avoid such errors, it is a good idea not to mix spaces and tabs in the same source file. @@ -376,7 +430,7 @@ If none of these criteria apply, it's often better to not use an end marker sinc ### Syntax -``` +```ebnf EndMarker ::= ‘end’ EndMarkerTag -- when followed by EOL EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’ | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’ @@ -448,62 +502,3 @@ indented regions where possible. When invoked with options `-rewrite -no-indent` The `-indent` option only works on [new-style syntax](./control-syntax.md). So to go from old-style syntax to new-style indented code one has to invoke the compiler twice, first with options `-rewrite -new-syntax`, then again with options `-rewrite -indent`. To go in the opposite direction, from indented code to old-style syntax, it's `-rewrite -no-indent`, followed by `-rewrite -old-syntax`. -## Variant: Indentation Marker `:` for Arguments - -Generally, the possible indentation regions coincide with those regions where braces `{...}` are also legal, no matter whether the braces enclose an expression or a set of definitions. There is one exception, though: Arguments to functions can be enclosed in braces but they cannot be simply indented instead. Making indentation always significant for function arguments would be too restrictive and fragile. 
- -To allow such arguments to be written without braces, a variant of the indentation scheme is implemented under language import -```scala -import language.experimental.fewerBraces -``` -In this variant, a `` token is also recognized where function argument would be expected. Examples: - -```scala -times(10): - println("ah") - println("ha") -``` - -or - -```scala -credentials `++`: - val file = Path.userHome / ".credentials" - if file.exists - then Seq(Credentials(file)) - else Seq() -``` - -or - -```scala -xs.map: - x => - val y = x - 1 - y * y -``` -What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this: - -```scala -xs.map: x => - val y = x - 1 - y * y -``` -and the following would also be legal: -```scala -xs.foldLeft(0): (x, y) => - x + y -``` - -The grammar changes for this variant are as follows. - -``` -SimpleExpr ::= ... - | SimpleExpr ColonArgument -InfixExpr ::= ... - | InfixExpr id ColonArgument -ColonArgument ::= colon [LambdaStart] - indent (CaseClauses | Block) outdent -LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - | HkTypeParamClause ‘=>’ -``` \ No newline at end of file diff --git a/docs/_docs/reference/other-new-features/kind-polymorphism.md b/docs/_docs/reference/other-new-features/kind-polymorphism.md index 8f0172c4c04b..e452ee8384f9 100644 --- a/docs/_docs/reference/other-new-features/kind-polymorphism.md +++ b/docs/_docs/reference/other-new-features/kind-polymorphism.md @@ -43,5 +43,5 @@ It is declared `abstract` and `final`, so it can be neither instantiated nor ext `AnyKind` plays a special role in Scala's subtype system: It is a supertype of all other types no matter what their kind is. It is also assumed to be kind-compatible with all other types. Furthermore, `AnyKind` is treated as a higher-kinded type (so it cannot be used as a type of values), but at the same time it has no type parameters (so it cannot be instantiated). 
-**Note**: This feature is considered experimental but stable and it can be disabled under compiler flag +**Note:** This feature is considered experimental but stable and it can be disabled under compiler flag (i.e. `-Yno-kind-polymorphism`). diff --git a/docs/_docs/reference/other-new-features/opaques-details.md b/docs/_docs/reference/other-new-features/opaques-details.md index d7305a249089..d285ec8e8325 100644 --- a/docs/_docs/reference/other-new-features/opaques-details.md +++ b/docs/_docs/reference/other-new-features/opaques-details.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaqu ## Syntax -``` +```ebnf Modifier ::= ... | ‘opaque’ ``` @@ -40,7 +40,7 @@ object o: In this case we have inside the object (also for non-opaque types) that `o.T` is equal to `T` or its expanded form `o.this.T`. Equality is understood here as mutual subtyping, i.e. -`o.T <: o.this.T` and `o.this.T <: T`. Furthermore, we have by the rules of opaque type aliases +`o.T <: o.this.T` and `o.this.T <: o.T`. Furthermore, we have by the rules of opaque type aliases that `o.this.T` equals `R`. The two equalities compose. That is, inside `o`, it is also known that `o.T` is equal to `R`. 
This means the following code type-checks: @@ -65,7 +65,7 @@ opaque type G = [T] =>> List[T] but the following are not: ```scala opaque type BadF[T] = [U] =>> (T, U) -opaque type BadG = [T] =>> [U] => (T, U) +opaque type BadG = [T] =>> [U] =>> (T, U) ``` ## Translation of Equality diff --git a/docs/_docs/reference/other-new-features/opaques.md b/docs/_docs/reference/other-new-features/opaques.md index d8c4d37bcb3b..567b51098016 100644 --- a/docs/_docs/reference/other-new-features/opaques.md +++ b/docs/_docs/reference/other-new-features/opaques.md @@ -174,6 +174,6 @@ val z = l2(3.1) l1.mul(x, y) // type checks l1.mul(x, z) // error: found l2.Logarithm, required l1.Logarithm ``` -In general, one can think of an opaque type as being only transparent in the scope of `private[this]`. +In general, one can think of an opaque type as being only transparent in the scope of `private[this]` (unless the type is a top level definition - in this case, it's transparent only within the file it's defined in). [More details](opaques-details.md) diff --git a/docs/_docs/reference/other-new-features/parameter-untupling-spec.md b/docs/_docs/reference/other-new-features/parameter-untupling-spec.md index e5165550fc0d..fd462dd610c8 100644 --- a/docs/_docs/reference/other-new-features/parameter-untupling-spec.md +++ b/docs/_docs/reference/other-new-features/parameter-untupling-spec.md @@ -4,37 +4,7 @@ title: "Parameter Untupling - More Details" nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling-spec.html --- -## Motivation -Say you have a list of pairs - -```scala -val xs: List[(Int, Int)] -``` - -and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to their sum. -Previously, the best way to do this was with a pattern-matching decomposition: - -```scala -xs.map { - case (x, y) => x + y -} -``` -While correct, this is inconvenient. 
Instead, we propose to write it the following way: - -```scala -xs.map { - (x, y) => x + y -} -``` - -or, equivalently: - -```scala -xs.map(_ + _) -``` - -Generally, a function value with `n > 1` parameters can be converted to a function with tupled arguments if the expected type is a unary function type of the form `((T_1, ..., T_n)) => U`. ## Type Checking diff --git a/docs/_docs/reference/other-new-features/parameter-untupling.md b/docs/_docs/reference/other-new-features/parameter-untupling.md index fcc1fa11d519..e1e7afcad8fe 100644 --- a/docs/_docs/reference/other-new-features/parameter-untupling.md +++ b/docs/_docs/reference/other-new-features/parameter-untupling.md @@ -57,12 +57,13 @@ The function value must be explicitly tupled, rather than the parameters untuple xs.map(combiner.tupled) ``` -A conversion may be provided in user code: +Though strongly discouraged, to have the same effect, an implicit conversion may be provided in user code: ```scala import scala.language.implicitConversions -transparent inline implicit def `fallback untupling`(f: (Int, Int) => Int): ((Int, Int)) => Int = - p => f(p._1, p._2) // use specialized apply instead of unspecialized `tupled` + +transparent inline given `fallback untupling`: Conversion[(Int, Int) => Int, ((Int, Int)) => Int] = _.tupled + xs.map(combiner) ``` diff --git a/docs/_docs/reference/other-new-features/targetName.md b/docs/_docs/reference/other-new-features/targetName.md index 63c4cf1ec0df..717ce4247a1f 100644 --- a/docs/_docs/reference/other-new-features/targetName.md +++ b/docs/_docs/reference/other-new-features/targetName.md @@ -93,7 +93,7 @@ The relevant overriding rules can be summarized as follows: - If two members override, then both their erased names and their types must be the same. As usual, any overriding relationship in the generated code must also -be present in the original code. So the following example would also be in error: +be present in the original code. 
So the following example would also be an error: ```scala import annotation.targetName diff --git a/docs/_docs/reference/other-new-features/trait-parameters.md b/docs/_docs/reference/other-new-features/trait-parameters.md index c704e73ce9b8..e46ba455c7b3 100644 --- a/docs/_docs/reference/other-new-features/trait-parameters.md +++ b/docs/_docs/reference/other-new-features/trait-parameters.md @@ -79,7 +79,7 @@ The definition of `F` in the last line is implicitly expanded to class F(using iname: ImpliedName) extends Object, ImpliedGreeting(using iname), - ImpliedFormalGreeting(using iname) + ImpliedFormalGreeting ``` Note the inserted reference to the super trait `ImpliedGreeting`, which was not mentioned explicitly. diff --git a/docs/_docs/reference/other-new-features/transparent-traits.md b/docs/_docs/reference/other-new-features/transparent-traits.md index 699ce0b9ddd8..b930ffbfde00 100644 --- a/docs/_docs/reference/other-new-features/transparent-traits.md +++ b/docs/_docs/reference/other-new-features/transparent-traits.md @@ -1,6 +1,6 @@ --- layout: doc-page -title: "Transparent Traits" +title: "Transparent Traits and Classes" nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/transparent-traits.html --- @@ -20,12 +20,13 @@ val x = Set(if condition then Val else Var) Here, the inferred type of `x` is `Set[Kind & Product & Serializable]` whereas one would have hoped it to be `Set[Kind]`. The reasoning for this particular type to be inferred is as follows: -- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. -- A union type is widened in type inference to the least supertype that is not a union type. - In the example, this type is `Kind & Product & Serializable` since all three traits are traits of both `Val` and `Var`. +- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. 
This union type is treated as "soft", which means it was not explicitly written in the source program, but came from forming an upper bound of the types of +some alternatives. +- A soft union type is widened in type inference to the least product of class or trait types that is a supertype of the union type. + In the example, this type is `Kind & Product & Serializable` since all three traits are super-traits of both `Val` and `Var`. So that type becomes the inferred element type of the set. -Scala 3 allows one to mark a mixin trait as `transparent`, which means that it can be suppressed in type inference. Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`: +Scala 3 allows one to mark a trait or class as `transparent`, which means that it can be suppressed in type inference. Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`: ```scala transparent trait S @@ -38,13 +39,40 @@ val x = Set(if condition then Val else Var) Now `x` has inferred type `Set[Kind]`. The common transparent trait `S` does not appear in the inferred type. -## Transparent Traits +In the previous example, one could also declare `Kind` as `transparent`: +```scala +transparent trait Kind +``` +The widened union type of `if condition then Val else Var` would then +_only_ contain the transparent traits `Kind` and `S`. In this case, +the widening is not performed at all, so `x` would have type `Set[Val | Var]`. + +The root classes and traits `Any`, `AnyVal`, `Object`, and `Matchable` are +considered to be transparent. This means that an expression such +as +```scala +if condition then 1 else "hello" +``` +will have type `Int | String` instead of the widened type `Any`. 
+ -The traits [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html), [`java.io.Serializable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/Serializable.html) and [`java.lang.Comparable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Comparable.html) -are treated automatically as transparent. Other traits are turned into transparent traits using the modifier `transparent`. Scala 2 traits can also be made transparent -by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed. -Typically, transparent traits are traits +## Which Traits and Classes Are Transparent? + +Traits and classes are declared transparent by adding the modifier `transparent`. Scala 2 traits and classes can also be declared transparent by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed. + +The following classes and traits are automatically treated as transparent: +```scala + scala.Any + scala.AnyVal + scala.Matchable + scala.Product + java.lang.Object + java.lang.Comparable + java.io.Serializable +``` + +Typically, transparent types other than the root classes are traits that influence the implementation of inheriting classes and traits that are not usually used as types by themselves. 
Two examples from the standard collection library are: - [`IterableOps`](https://scala-lang.org/api/3.x/scala/collection/IterableOps.html), which provides method implementations for an [`Iterable`](https://scala-lang.org/api/3.x/scala/collection/Iterable.html). @@ -55,7 +83,10 @@ declared transparent. ## Rules for Inference -Transparent traits can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference say that transparent traits are dropped from intersections where possible. +Transparent traits and classes can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference imply the following. + + - Transparent traits are dropped from intersections where possible. + - Union types are not widened if widening would result in only transparent supertypes. The precise rules are as follows: @@ -63,8 +94,8 @@ The precise rules are as follows: - where that type is not higher-kinded, - and where `B` is its known upper bound or `Any` if none exists: - If the type inferred so far is of the form `T1 & ... & Tn` where - `n >= 1`, replace the maximal number of transparent `Ti`s by `Any`, while ensuring that + `n >= 1`, replace the maximal number of transparent traits `Ti`s by `Any`, while ensuring that the resulting type is still a subtype of the bound `B`. -- However, do not perform this widening if all transparent traits `Ti` can get replaced in that way. +- However, do not perform this widening if all types `Ti` can get replaced in that way. This clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. 
-The last clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. +- If the original type was a is union type that got widened in a previous step to a product consisting only of transparent traits and classes, keep the original union type instead of its widened form. \ No newline at end of file diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index e11629c8eaf9..6abc3b2011d1 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -105,7 +105,10 @@ semi ::= ‘;’ | nl {nl} ## Optional Braces -The lexical analyzer also inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). +The principle of optional braces is that any keyword that can be followed by `{` can also be followed by an indented block, without needing an intervening `:`. +(Allowing an optional `:` would be counterproductive since it would introduce several ways to do the same thing.) + +The lexical analyzer inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). In the context-free productions below we use the notation `<<< ts >>>` to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. 
Analogously, the @@ -249,6 +252,7 @@ Catches ::= ‘catch’ (Expr | ExprCaseClause) PostfixExpr ::= InfixExpr [id] -- only if language.postfixOperators is enabled InfixExpr ::= PrefixExpr | InfixExpr id [nl] InfixExpr + | InfixExpr id ColonArgument | InfixExpr MatchClause MatchClause ::= ‘match’ <<< CaseClauses >>> PrefixExpr ::= [PrefixOperator] SimpleExpr @@ -267,6 +271,11 @@ SimpleExpr ::= SimpleRef | SimpleExpr ‘.’ MatchClause | SimpleExpr TypeArgs | SimpleExpr ArgumentExprs + | SimpleExpr ColonArgument +ColonArgument ::= colon [LambdaStart] + indent (CaseClauses | Block) outdent +LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + | HkTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ | ‘'’ ‘[’ Type ‘]’ ExprSplice ::= spliceId -- if inside quoted block @@ -306,7 +315,10 @@ TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] Pattern ::= Pattern1 { ‘|’ Pattern1 } -Pattern1 ::= Pattern2 [‘:’ RefinedType] +Pattern1 ::= PatVar ‘:’ RefinedType + | [‘-’] integerLiteral ‘:’ RefinedType + | [‘-’] floatingPointLiteral ‘:’ RefinedType + | Pattern2 Pattern2 ::= [id ‘@’] InfixPattern [‘*’] InfixPattern ::= SimplePattern { id [nl] SimplePattern } SimplePattern ::= PatVar @@ -329,9 +341,6 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeParamBounds -DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds - TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -343,13 +352,20 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param -Param ::= id ‘:’ ParamType [‘=’ Expr] -DefParamClauses ::= {DefParamClause} 
[[nl] ‘(’ [‘implicit’] DefParams ‘)’] -DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause -UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’ -DefParams ::= DefParam {‘,’ DefParam} -DefParam ::= {Annotation} [‘inline’] Param +TypelessClauses ::= TypelessClause {TypelessClause} +TypelessClause ::= DefTermParamClause + | UsingParamClause + +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] ``` ### Bindings and Imports @@ -400,8 +416,8 @@ Dcl ::= RefineDcl ValDcl ::= ids ‘:’ Type VarDcl ::= ids ‘:’ Type DefDcl ::= DefSig ‘:’ Type -DefSig ::= id [DefTypeParamClause] DefParamClauses -TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] +DefSig ::= id [DefTypeParamClause] [TypelessClauses] [DefImplicitClause] +TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds Def ::= ‘val’ PatDef | ‘var’ PatDef @@ -411,7 +427,7 @@ Def ::= ‘val’ PatDef PatDef ::= ids [‘:’ Type] ‘=’ Expr | Pattern2 [‘:’ Type] ‘=’ Expr DefDef ::= DefSig [‘:’ Type] ‘=’ Expr - | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr + | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef @@ -423,10 +439,10 @@ ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefParamClause`, `UsingParamClause` must be present +GivenSig ::= [id] [DefTypeParamClause] 
{UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} - ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods + ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef | Export diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index 62823d08c751..feb79d1590a0 100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -1,7 +1,3 @@ ---- -extraCSS: - - css/color-brewer.css ---- diff --git a/docs/_layouts/doc-page.html b/docs/_layouts/doc-page.html index 36e806678136..5f88a3301875 100644 --- a/docs/_layouts/doc-page.html +++ b/docs/_layouts/doc-page.html @@ -5,9 +5,7 @@
{% if urls.editSource %} - - Edit this page on GitHub - + Edit this page on GitHub {% endif %}

{{ page.title }}

diff --git a/docs/_layouts/index.html b/docs/_layouts/index.html index 72e3bb609d56..247f916530dd 100644 --- a/docs/_layouts/index.html +++ b/docs/_layouts/index.html @@ -1,15 +1,24 @@ --- layout: static-site-main --- -

{{ page.title }}

- -{{ content }} - -

Table of Contents

- + +
+
+ {% if urls.editSource %} + + {% endif %} +

{{ page.title }}

+
+ + {{ content }} + +

Table of Contents

+ + +
\ No newline at end of file diff --git a/docs/_layouts/main.html b/docs/_layouts/main.html index a22f912e3eef..6adc6cacda46 100644 --- a/docs/_layouts/main.html +++ b/docs/_layouts/main.html @@ -1,9 +1,5 @@ --- layout: base -extraJS: - - js/contributors.js -extraCSS: - - css/content-contributors.css ---
{{ content }}
diff --git a/docs/_layouts/static-site-main.html b/docs/_layouts/static-site-main.html index 618525782626..508cf61efe2d 100644 --- a/docs/_layouts/static-site-main.html +++ b/docs/_layouts/static-site-main.html @@ -4,12 +4,15 @@
- {% if page.movedTo %} + {% if page.nightlyOf %} {% endif %} {{ content }}
@@ -28,7 +31,7 @@
{% endif %} {% if page.next %}
- Next + Next Eta Expansion + +_Eta-expansion_ converts an expression of method type to an equivalent expression of function type. +It proceeds in two steps. + +First, one identifies the maximal sub-expressions of ´e´; let's say these are ´e_1, ..., e_m´. +For each of these, one creates a fresh name ´x_i´. +Let ´e'´ be the expression resulting from replacing every maximal subexpression ´e_i´ in ´e´ by the corresponding fresh name ´x_i´. +Second, one creates a fresh name ´y_i´ for every argument type ´T_i´ of the method (´i = 1 , ..., n´). +The result of eta-conversion is then: + +```scala +{ val ´x_1´ = ´e_1´; + ... + val ´x_m´ = ´e_m´; + (´y_1: T_1, ..., y_n: T_n´) => ´e'´(´y_1, ..., y_n´) +} +``` + +The behavior of [call-by-name parameters](#function-applications) is preserved under eta-expansion: the corresponding actual argument expression, a sub-expression of parameterless method type, is not evaluated in the expanded block. + +### Dynamic Member Selection + +The standard Scala library defines a marker trait `scala.Dynamic`. +Subclasses of this trait are able to intercept selections and applications on their instances by defining methods of the names `applyDynamic`, `applyDynamicNamed`, `selectDynamic`, and `updateDynamic`. 
+ +The following rewrites are performed, assuming ´e´'s type conforms to `scala.Dynamic`, and the original expression does not type check under the normal rules, as specified fully in the relevant subsection of [implicit conversion](#dynamic-member-selection): + + * `e.m[Ti](xi)` becomes `e.applyDynamic[Ti]("m")(xi)` + * `e.m[Ti]` becomes `e.selectDynamic[Ti]("m")` + * `e.m = x` becomes `e.updateDynamic("m")(x)` + +If any arguments are named in the application (one of the `xi` is of the shape `arg = x`), their name is preserved as the first component of the pair passed to `applyDynamicNamed` (for missing names, `""` is used): + + * `e.m[Ti](argi = xi)` becomes `e.applyDynamicNamed[Ti]("m")(("argi", xi))` + +Finally: + + * `e.m(x) = y` becomes `e.selectDynamic("m").update(x, y)` + +None of these methods are actually defined in the `scala.Dynamic`, so that users are free to define them with or without type parameters, or implicit arguments. diff --git a/docs/_spec/07-implicits.md b/docs/_spec/07-implicits.md new file mode 100644 index 000000000000..a0db9dd9b418 --- /dev/null +++ b/docs/_spec/07-implicits.md @@ -0,0 +1,407 @@ +--- +title: Implicits +layout: default +chapter: 7 +--- + +# Implicits + +## The Implicit Modifier + +```ebnf +LocalModifier ::= ‘implicit’ +ParamClauses ::= {ParamClause} [nl] ‘(’ ‘implicit’ Params ‘)’ +``` + +Template members and parameters labeled with an `implicit` modifier can be passed to [implicit parameters](#implicit-parameters) and can be used as implicit conversions called [views](#views). +The `implicit` modifier is illegal for all type members, as well as for [top-level objects](09-top-level-definitions.html#packagings). + +###### Example Monoid + +The following code defines an abstract class of monoids and two concrete implementations, `StringMonoid` and `IntMonoid`. +The two implementations are marked implicit. 
+ +```scala +abstract class Monoid[A] extends SemiGroup[A] { + def unit: A + def add(x: A, y: A): A +} +object Monoids { + implicit object stringMonoid extends Monoid[String] { + def add(x: String, y: String): String = x.concat(y) + def unit: String = "" + } + implicit object intMonoid extends Monoid[Int] { + def add(x: Int, y: Int): Int = x + y + def unit: Int = 0 + } +} +``` + +## Implicit Parameters + +An _implicit parameter list_ `(implicit ´p_1´,...,´p_n´)` of a method marks the parameters ´p_1, ..., p_n´ as implicit. +A method or constructor can have only one implicit parameter list, and it must be the last parameter list given. + +A method with implicit parameters can be applied to arguments just like a normal method. +In this case the `implicit` label has no effect. +However, if such a method misses arguments for its implicit parameters, such arguments will be automatically provided. + +The actual arguments that are eligible to be passed to an implicit parameter of type ´T´ fall into two categories. +First, eligible are all identifiers ´x´ that can be accessed at the point of the method call without a prefix and that denote an [implicit definition](#the-implicit-modifier) or an implicit parameter. +To be accessible without a prefix, an identifier must be a local name, a member of an enclosing template or a name introduced by an [import clause](04-basic-declarations-and-definitions.html#import-clauses). +If there are no eligible identifiers under this rule, then, second, eligible are also all `implicit` members of some object that belongs to the implicit scope of the implicit parameter's type, ´T´. + +The _implicit scope_ of a type ´T´ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type. +Here, we say a class ´C´ is _associated_ with a type ´T´ if it is a [base class](05-classes-and-objects.html#class-linearization) of some part of ´T´. 
+ +The _parts_ of a type ´T´ are: + +- if ´T´ is a compound type `´T_1´ with ... with ´T_n´`, the union of the parts of ´T_1, ..., T_n´, as well as ´T´ itself; +- if ´T´ is a parameterized type `´S´[´T_1, ..., T_n´]`, the union of the parts of ´S´ and ´T_1, ..., T_n´; +- if ´T´ is a singleton type `´p´.type`, the parts of the type of ´p´; +- if ´T´ is a type projection `´S´#´U´`, the parts of ´S´ as well as ´T´ itself; +- if ´T´ is a type alias, the parts of its expansion; +- if ´T´ is an abstract type, the parts of its upper bound; +- if ´T´ denotes an implicit conversion to a type with a method with argument types ´T_1, ..., T_n´ and result type ´U´, the union of the parts of ´T_1, ..., T_n´ and ´U´; +- the parts of quantified (existential or universal) and annotated types are defined as the parts of the underlying types (e.g., the parts of `T forSome { ... }` are the parts of `T`); +- in all other cases, just ´T´ itself. + +Note that packages are internally represented as classes with companion modules to hold the package members. +Thus, implicits defined in a package object are part of the implicit scope of a type prefixed by that package. + +If there are several eligible arguments which match the implicit parameter's type, a most specific one will be chosen using the rules of static [overloading resolution](06-expressions.html#overloading-resolution). +If the parameter has a default argument and no implicit argument can be found the default argument is used. + +###### Example +Assuming the classes from the [`Monoid` example](#example-monoid), here is a method which computes the sum of a list of elements using the monoid's `add` and `unit` operations. + +```scala +def sum[A](xs: List[A])(implicit m: Monoid[A]): A = + if (xs.isEmpty) m.unit + else m.add(xs.head, sum(xs.tail)) +``` + +The monoid in question is marked as an implicit parameter, and can therefore be inferred based on the type of the list. 
+Consider for instance the call `sum(List(1, 2, 3))` in a context where `stringMonoid` and `intMonoid` are visible. +We know that the formal type parameter `A` of `sum` needs to be instantiated to `Int`. +The only eligible object which matches the implicit formal parameter type `Monoid[Int]` is `intMonoid` so this object will be passed as implicit parameter. + +This discussion also shows that implicit parameters are inferred after any type arguments are [inferred](06-expressions.html#local-type-inference). + +Implicit methods can themselves have implicit parameters. +An example is the following method from module `scala.List`, which injects lists into the `scala.Ordered` class, provided the element type of the list is also convertible to this type. + +```scala +implicit def list2ordered[A](x: List[A]) + (implicit elem2ordered: A => Ordered[A]): Ordered[List[A]] = + ... +``` + +Assume in addition a method + +```scala +implicit def int2ordered(x: Int): Ordered[Int] +``` + +that injects integers into the `Ordered` class. +We can now define a `sort` method over ordered lists: + +```scala +def sort[A](xs: List[A])(implicit a2ordered: A => Ordered[A]) = ... +``` + +We can apply `sort` to a list of lists of integers `yss: List[List[Int]]` as follows: + +```scala +sort(yss) +``` + +The call above will be completed by passing two nested implicit arguments: + +```scala +sort(yss)((xs: List[Int]) => list2ordered[Int](xs)(int2ordered)) +``` + +The possibility of passing implicit arguments to implicit arguments raises the possibility of an infinite recursion. 
+For instance, one might try to define the following method, which injects _every_ type into the `Ordered` class: + +```scala +implicit def magic[A](x: A)(implicit a2ordered: A => Ordered[A]): Ordered[A] = + a2ordered(x) +``` + +Now, if one tried to apply `sort` to an argument `arg` of a type that did not have another injection into the `Ordered` class, one would obtain an infinite expansion: + +```scala +sort(arg)(x => magic(x)(x => magic(x)(x => ... ))) +``` + +Such infinite expansions should be detected and reported as errors, however to support the deliberate implicit construction of recursive values we allow implicit arguments to be marked as by-name. +At call sites recursive uses of implicit values are permitted if they occur in an implicit by-name argument. + +Consider the following example, + +```scala +trait Foo { + def next: Foo +} + +object Foo { + implicit def foo(implicit rec: Foo): Foo = + new Foo { def next = rec } +} + +val foo = implicitly[Foo] +assert(foo eq foo.next) +``` + +As with the `magic` case above this diverges due to the recursive implicit argument `rec` of method `foo`. +If we mark the implicit argument as by-name, + +```scala +trait Foo { + def next: Foo +} + +object Foo { + implicit def foo(implicit rec: => Foo): Foo = + new Foo { def next = rec } +} + +val foo = implicitly[Foo] +assert(foo eq foo.next) +``` + +the example compiles with the assertion successful. + +When compiled, recursive by-name implicit arguments of this sort are extracted out as val members of a local synthetic object at call sites as follows, + +```scala +val foo: Foo = scala.Predef.implicitly[Foo]( + { + object LazyDefns$1 { + val rec$1: Foo = Foo.foo(rec$1) + // ^^^^^ + // recursive knot tied here + } + LazyDefns$1.rec$1 + } +) +assert(foo eq foo.next) +``` + +Note that the recursive use of `rec$1` occurs within the by-name argument of `foo` and is consequently deferred. 
+The desugaring matches what a programmer would do to construct such a recursive value explicitly. + +To prevent infinite expansions, such as the `magic` example above, the compiler keeps track of a stack of “open implicit types” for which implicit arguments are currently being searched. +Whenever an implicit argument for type ´T´ is searched, ´T´ is added to the stack paired with the implicit definition which produces it, and whether it was required to satisfy a by-name implicit argument or not. +The type is removed from the stack once the search for the implicit argument either definitely fails or succeeds. +Every time a type is about to be added to the stack, it is checked against existing entries which were produced by the same implicit definition and then, + ++ if it is equivalent to some type which is already on the stack and there is a by-name argument between that entry and the top of the stack. +In this case the search for that type succeeds immediately and the implicit argument is compiled as a recursive reference to the found argument. +That argument is added as an entry in the synthesized implicit dictionary if it has not already been added. ++ otherwise if the _core_ of the type _dominates_ the core of a type already on the stack, then the implicit expansion is said to _diverge_ and the search for that type fails immediately. ++ otherwise it is added to the stack paired with the implicit definition which produces it. +Implicit resolution continues with the implicit arguments of that definition (if any). + +Here, the _core type_ of ´T´ is ´T´ with aliases expanded, top-level type [annotations](11-annotations.html#user-defined-annotations) and [refinements](03-types.html#compound-types) removed, and occurrences of top-level existentially bound variables replaced by their upper bounds. 
+ +A core type ´T´ _dominates_ a type ´U´ if ´T´ is [equivalent](03-types.html#equivalence) to ´U´, or if the top-level type constructors of ´T´ and ´U´ have a common element and ´T´ is more complex than ´U´ and the _covering sets_ of ´T´ and ´U´ are equal. + +The set of _top-level type constructors_ ´\mathit{ttcs}(T)´ of a type ´T´ depends on the form of the type: + +- For a type designator, ´\mathit{ttcs}(p.c) ~=~ \{c\}´; +- For a parameterized type, ´\mathit{ttcs}(p.c[\mathit{targs}]) ~=~ \{c\}´; +- For a singleton type, ´\mathit{ttcs}(p.type) ~=~ \mathit{ttcs}(T)´, provided ´p´ has type ´T´; +- For a compound type, `´\mathit{ttcs}(T_1´ with ... with ´T_n)´` ´~=~ \mathit{ttcs}(T_1) \cup ... \cup \mathit{ttcs}(T_n)´. + +The _complexity_ ´\operatorname{complexity}(T)´ of a core type is an integer which also depends on the form of the type: + +- For a type designator, ´\operatorname{complexity}(p.c) ~=~ 1 + \operatorname{complexity}(p)´ +- For a parameterized type, ´\operatorname{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \operatorname{complexity}(\mathit{targs})´ +- For a singleton type denoting a package ´p´, ´\operatorname{complexity}(p.type) ~=~ 0´ +- For any other singleton type, ´\operatorname{complexity}(p.type) ~=~ 1 + \operatorname{complexity}(T)´, provided ´p´ has type ´T´; +- For a compound type, `´\operatorname{complexity}(T_1´ with ... with ´T_n)´` ´= \Sigma\operatorname{complexity}(T_i)´ + +The _covering set_ ´\mathit{cs}(T)´ of a type ´T´ is the set of type designators mentioned in a type. 
+For example, given the following, + +```scala +type A = List[(Int, Int)] +type B = List[(Int, (Int, Int))] +type C = List[(Int, String)] +``` + +the corresponding covering sets are: + +- ´\mathit{cs}(A)´: List, Tuple2, Int +- ´\mathit{cs}(B)´: List, Tuple2, Int +- ´\mathit{cs}(C)´: List, Tuple2, Int, String + +###### Example +When typing `sort(xs)` for some list `xs` of type `List[List[List[Int]]]`, the sequence of types for which implicit arguments are searched is + +```scala +List[List[Int]] => Ordered[List[List[Int]]], +List[Int] => Ordered[List[Int]], +Int => Ordered[Int] +``` + +All types share the common type constructor `scala.Function1`, but the complexity of each new type is lower than the complexity of the previous types. +Hence, the code typechecks. + +###### Example +Let `ys` be a list of some type which cannot be converted to `Ordered`. +For instance: + +```scala +val ys = List(new IllegalArgumentException, new ClassCastException, new Error) +``` + +Assume that the definition of `magic` above is in scope. +Then the sequence of types for which implicit arguments are searched is + +```scala +Throwable => Ordered[Throwable], +Throwable => Ordered[Throwable], +... +``` + +Since the second type in the sequence is equal to the first, the compiler will issue an error signalling a divergent implicit expansion. + +## Views + +Implicit parameters and methods can also define implicit conversions called views. +A _view_ from type ´S´ to type ´T´ is defined by an implicit value which has function type `´S´ => ´T´` or `(=> ´S´) => ´T´` or by a method convertible to a value of that type. + +Views are applied in three situations: + +1. If an expression ´e´ is of type ´T´, and ´T´ does not conform to the expression's expected type ´\mathit{pt}´. +In this case an implicit ´v´ is searched which is applicable to ´e´ and whose result type conforms to ´\mathit{pt}´. 
+The search proceeds as in the case of implicit parameters, where the implicit scope is the one of `´T´ => ´\mathit{pt}´`. +If such a view is found, the expression ´e´ is converted to `´v´(´e´)`. +1. In a selection ´e.m´ with ´e´ of type ´T´, if the selector ´m´ does not denote an accessible member of ´T´. +In this case, a view ´v´ is searched which is applicable to ´e´ and whose result contains a member named ´m´. +The search proceeds as in the case of implicit parameters, where the implicit scope is the one of ´T´. +If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m´`. +1. In a selection ´e.m(\mathit{args})´ with ´e´ of type ´T´, if the selector ´m´ denotes some member(s) of ´T´, but none of these members is applicable to the arguments ´\mathit{args}´. +In this case a view ´v´ is searched which is applicable to ´e´ and whose result contains a method ´m´ which is applicable to ´\mathit{args}´. +The search proceeds as in the case of implicit parameters, where the implicit scope is the one of ´T´. If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m(\mathit{args})´`. + +The implicit view, if it is found, can accept its argument ´e´ as a call-by-value or as a call-by-name parameter. +However, call-by-value implicits take precedence over call-by-name implicits. + +As for implicit parameters, overloading resolution is applied if there are several possible candidates (of either the call-by-value or the call-by-name category). + +###### Example Ordered + +Class `scala.Ordered[A]` contains a method + +```scala + def <= [B >: A](that: B)(implicit b2ordered: B => Ordered[B]): Boolean +``` + +Assume two lists `xs` and `ys` of type `List[Int]` and assume that the `list2ordered` and `int2ordered` methods defined [here](#implicit-parameters) are in scope. 
+Then the operation + +```scala + xs <= ys +``` + +is legal, and is expanded to: + +```scala + list2ordered(xs)(int2ordered).<= + (ys) + (xs => list2ordered(xs)(int2ordered)) +``` + +The first application of `list2ordered` converts the list `xs` to an instance of class `Ordered`, whereas the second occurrence is part of an implicit parameter passed to the `<=` method. + +## Context Bounds and View Bounds + +```ebnf + TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + {‘<%’ Type} {‘:’ Type} +``` + +A type parameter ´A´ of a method or non-trait class may have one or more view bounds `´A´ <% ´T´`. +In this case the type parameter may be instantiated to any type ´S´ which is convertible by application of a view to the bound ´T´. + +A type parameter ´A´ of a method or non-trait class may also have one or more context bounds `´A´ : ´T´`. +In this case the type parameter may be instantiated to any type ´S´ for which _evidence_ exists at the instantiation point that ´S´ satisfies the bound ´T´. +Such evidence consists of an implicit value with type ´T[S]´. + +A method or class containing type parameters with view or context bounds is treated as being equivalent to a method with implicit parameters. +Consider first the case of a single parameter with view and/or context bounds such as: + +```scala +def ´f´[´A´ <% ´T_1´ ... <% ´T_m´ : ´U_1´ : ´U_n´](´\mathit{ps}´): ´R´ = ... +``` + +Then the method definition above is expanded to + +```scala +def ´f´[´A´](´\mathit{ps}´)(implicit ´v_1´: ´A´ => ´T_1´, ..., ´v_m´: ´A´ => ´T_m´, + ´w_1´: ´U_1´[´A´], ..., ´w_n´: ´U_n´[´A´]): ´R´ = ... +``` + +where the ´v_i´ and ´w_j´ are fresh names for the newly introduced implicit parameters. +These parameters are called _evidence parameters_. 
+ +If a class or method has several view- or context-bounded type parameters, each such type parameter is expanded into evidence parameters in the order they appear and all the resulting evidence parameters are concatenated in one implicit parameter section. +Since traits do not take constructor parameters, this translation does not work for them. +Consequently, type-parameters in traits may not be view- or context-bounded. + +Evidence parameters are prepended to the existing implicit parameter section, if one exists. + +For example: + +```scala +def foo[A: M](implicit b: B): C +// expands to: +// def foo[A](implicit evidence´1: M[A], b: B): C +``` + +###### Example +The `<=` method from the [`Ordered` example](#example-ordered) can be declared more concisely as follows: + +```scala +def <= [B >: A <% Ordered[B]](that: B): Boolean +``` + +## Manifests + +Manifests are type descriptors that can be automatically generated by the Scala compiler as arguments to implicit parameters. +The Scala standard library contains a hierarchy of four manifest classes, with `OptManifest` at the top. +Their signatures follow the outline below. + +```scala +trait OptManifest[+T] +object NoManifest extends OptManifest[Nothing] +trait ClassManifest[T] extends OptManifest[T] +trait Manifest[T] extends ClassManifest[T] +``` + +If an implicit parameter of a method or constructor is of a subtype ´M[T]´ of class `OptManifest[T]`, _a manifest is determined for ´M[S]´_, according to the following rules. + +First if there is already an implicit argument that matches ´M[T]´, this argument is selected. + +Otherwise, let ´\mathit{Mobj}´ be the companion object `scala.reflect.Manifest` if ´M´ is trait `Manifest`, or be the companion object `scala.reflect.ClassManifest` otherwise. +Let ´M'´ be the trait `Manifest` if ´M´ is trait `Manifest`, or be the trait `OptManifest` otherwise. +Then the following rules apply. + +1. 
If ´T´ is a value class or one of the classes `Any`, `AnyVal`, `Object`, `Null`, or `Nothing`, a manifest for it is generated by selecting the corresponding manifest value `Manifest.´T´`, which exists in the `Manifest` module. +1. If ´T´ is an instance of `Array[´S´]`, a manifest is generated with the invocation `´\mathit{Mobj}´.arrayType[S](m)`, where ´m´ is the manifest determined for ´M[S]´. +1. If ´T´ is some other class type ´S´#´C[U_1, ..., U_n]´ where the prefix type ´S´ cannot be statically determined from the class ´C´, a manifest is generated with the invocation `´\mathit{Mobj}´.classType[T](´m_0´, classOf[T], ´ms´)` where ´m_0´ is the manifest determined for ´M'[S]´ and ´ms´ are the manifests determined for ´M'[U_1], ..., M'[U_n]´. +1. If ´T´ is some other class type with type arguments ´U_1, ..., U_n´, a manifest is generated with the invocation `´\mathit{Mobj}´.classType[T](classOf[T], ´ms´)` where ´ms´ are the manifests determined for ´M'[U_1], ..., M'[U_n]´. +1. If ´T´ is a singleton type `´p´.type`, a manifest is generated with the invocation `´\mathit{Mobj}´.singleType[T](´p´)` +1. If ´T´ is a refined type ´T' \{ R \}´, a manifest is generated for ´T'´. +(That is, refinements are never reflected in manifests). +1. If ´T´ is an intersection type `´T_1´ with ... with ´T_n´` where ´n > 1´, the result depends on whether a full manifest is to be determined or not. +If ´M´ is trait `Manifest`, then a manifest is generated with the invocation `Manifest.intersectionType[T](´ms´)` where ´ms´ are the manifests determined for ´M[T_1], ..., M[T_n]´. +Otherwise, if ´M´ is trait `ClassManifest`, then a manifest is generated for the [intersection dominator](03-types.html#type-erasure) of the types ´T_1, ..., T_n´. +1. If ´T´ is some other type, then if ´M´ is trait `OptManifest`, a manifest is generated from the designator `scala.reflect.NoManifest`. +If ´M´ is a type different from `OptManifest`, a static error results. 
diff --git a/docs/_spec/08-pattern-matching.md b/docs/_spec/08-pattern-matching.md new file mode 100644 index 000000000000..97fb73d58b06 --- /dev/null +++ b/docs/_spec/08-pattern-matching.md @@ -0,0 +1,636 @@ +--- +title: Pattern Matching +layout: default +chapter: 8 +--- + +# Pattern Matching + +## Patterns + +```ebnf + Pattern ::= Pattern1 { ‘|’ Pattern1 } + Pattern1 ::= boundvarid ‘:’ TypePat + | ‘_’ ‘:’ TypePat + | Pattern2 + Pattern2 ::= id [‘@’ Pattern3] + | Pattern3 + Pattern3 ::= SimplePattern + | SimplePattern {id [nl] SimplePattern} + SimplePattern ::= ‘_’ + | varid + | Literal + | StableId + | StableId ‘(’ [Patterns] ‘)’ + | StableId ‘(’ [Patterns ‘,’] [id ‘@’] ‘_’ ‘*’ ‘)’ + | ‘(’ [Patterns] ‘)’ + | XmlPattern + Patterns ::= Pattern {‘,’ Patterns} +``` + +A pattern is built from constants, constructors, variables and type tests. +Pattern matching tests whether a given value (or sequence of values) has the shape defined by a pattern, and, if it does, binds the variables in the pattern to the corresponding components of the value (or sequence of values). +The same variable name may not be bound more than once in a pattern. + +###### Example +Some examples of patterns are: + 1. The pattern `ex: IOException` matches all instances of class `IOException`, binding variable `ex` to the instance. + 1. The pattern `Some(x)` matches values of the form `Some(´v´)`, binding `x` to the argument value ´v´ of the `Some` constructor. + 1. The pattern `(x, _)` matches pairs of values, binding `x` to the first component of the pair. The second component is matched with a wildcard pattern. + 1. The pattern `x :: y :: xs` matches lists of length ´\geq 2´, binding `x` to the list's first element, `y` to the list's second element, and `xs` to the remainder. + 1. The pattern `1 | 2 | 3` matches the integers between 1 and 3. + +Pattern matching is always done in a context which supplies an expected type of the pattern. +We distinguish the following kinds of patterns. 
+ +### Variable Patterns + +```ebnf + SimplePattern ::= ‘_’ + | varid +``` + +A _variable pattern_ ´x´ is a simple identifier which starts with a lower case letter. +It matches any value, and binds the variable name to that value. +The type of ´x´ is the expected type of the pattern as given from outside. +A special case is the wild-card pattern `_` which is treated as if it was a fresh variable on each occurrence. + +### Typed Patterns + +```ebnf + Pattern1 ::= varid ‘:’ TypePat + | ‘_’ ‘:’ TypePat +``` + +A _typed pattern_ ´x: T´ consists of a pattern variable ´x´ and a type pattern ´T´. +The type of ´x´ is the type pattern ´T´, where each type variable and wildcard is replaced by a fresh, unknown type. +This pattern matches any value matched by the [type pattern](#type-patterns) ´T´; it binds the variable name to that value. + +### Pattern Binders + +```ebnf + Pattern2 ::= varid ‘@’ Pattern3 +``` + +A _pattern binder_ `´x´@´p´` consists of a pattern variable ´x´ and a pattern ´p´. +The type of the variable ´x´ is the static type ´T´ implied by the pattern ´p´. +This pattern matches any value ´v´ matched by the pattern ´p´, and it binds the variable name to that value. + +A pattern ´p´ _implies_ a type ´T´ if the pattern matches only values of the type ´T´. + +### Literal Patterns + +```ebnf + SimplePattern ::= Literal +``` + +A _literal pattern_ ´L´ matches any value that is equal (in terms of `==`) to the literal ´L´. +The type of ´L´ must conform to the expected type of the pattern. + +### Interpolated string patterns + +```ebnf + Literal ::= interpolatedString +``` + +The expansion of interpolated string literals in patterns is the same as in expressions. +If it occurs in a pattern, a interpolated string literal of either of the forms +``` +id"text0{ pat1 }text1 ... { patn }textn" +id"""text0{ pat1 }text1 ... 
{ patn }textn""" +``` +is equivalent to: +``` +StringContext("""text0""", ..., """textn""").id(pat1, ..., patn) +``` +You could define your own `StringContext` to shadow the default one that's in the `scala` package. + +This expansion is well-typed if the member `id` evaluates to an extractor object. +If the extractor object has `apply` as well as `unapply` or `unapplySeq` methods, processed strings can be used as either expressions or patterns. + +Taking XML as an example +```scala +implicit class XMLinterpolation(s: StringContext) = { + object xml { + def apply(exprs: Any*) = + // parse ‘s’ and build an XML tree with ‘exprs’ + //in the holes + def unapplySeq(xml: Node): Option[Seq[Node]] = + // match `s’ against `xml’ tree and produce + //subtrees in holes + } +} +``` +Then, XML pattern matching could be expressed like this: +```scala +case xml""" + + $linktext + + """ => ... +``` +where linktext is a variable bound by the pattern. + +### Stable Identifier Patterns + +```ebnf + SimplePattern ::= StableId +``` + +A _stable identifier pattern_ is a [stable identifier](03-types.html#paths) ´r´. +The type of ´r´ must conform to the expected type of the pattern. +The pattern matches any value ´v´ such that `´r´ == ´v´` (see [here](12-the-scala-standard-library.html#root-classes)). + +To resolve the syntactic overlap with a variable pattern, a stable identifier pattern may not be a simple name starting with a lower-case letter. +However, it is possible to enclose such a variable name in backquotes; then it is treated as a stable identifier pattern. + +###### Example +Consider the following class definition: + +```scala +class C { c => + val x = 42 + val y = 27 + val Z = 8 + def f(x: Int) = x match { + case c.x => 1 // matches 42 + case `y` => 2 // matches 27 + case Z => 3 // matches 8 + case x => 4 // matches any value + } +} +``` + +Here, the first three patterns are stable identifier patterns, while the last one is a variable pattern. 
+ +### Constructor Patterns + +```ebnf +SimplePattern ::= StableId ‘(’ [Patterns] ‘)’ +``` + +A _constructor pattern_ is of the form ´c(p_1, ..., p_n)´ where ´n \geq 0´. It consists of a stable identifier ´c´, followed by element patterns ´p_1, ..., p_n´. +The constructor ´c´ is a simple or qualified name which denotes a [case class](05-classes-and-objects.html#case-classes). +If the case class is monomorphic, then it must conform to the expected type of the pattern, and the formal parameter types of ´x´'s [primary constructor](05-classes-and-objects.html#class-definitions) are taken as the expected types of the element patterns ´p_1, ..., p_n´. +If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of ´c´ conforms to the expected type of the pattern. +The instantiated formal parameter types of ´c´'s primary constructor are then taken as the expected types of the component patterns ´p_1, ..., p_n´. +The pattern matches all objects created from constructor invocations ´c(v_1, ..., v_n)´ where each element pattern ´p_i´ matches the corresponding value ´v_i´. + +A special case arises when ´c´'s formal parameter types end in a repeated parameter. +This is further discussed [here](#pattern-sequences). + +### Tuple Patterns + +```ebnf + SimplePattern ::= ‘(’ [Patterns] ‘)’ +``` + +A _tuple pattern_ `(´p_1´, ..., ´p_n´)` is an alias for the constructor pattern `scala.Tuple´n´(´p_1´, ..., ´p_n´)`, where ´n \geq 2´. The empty tuple `()` is the unique value of type `scala.Unit`. + +### Extractor Patterns + +```ebnf + SimplePattern ::= StableId ‘(’ [Patterns] ‘)’ +``` + +An _extractor pattern_ ´x(p_1, ..., p_n)´ where ´n \geq 0´ is of the same syntactic form as a constructor pattern. +However, instead of a case class, the stable identifier ´x´ denotes an object which has a member method named `unapply` or `unapplySeq` that matches the pattern. + +An extractor pattern cannot match the value `null`. 
The implementation ensures that the `unapply`/`unapplySeq` method is not applied to `null`. + +A type is said to be an _extractor type_ for some type `T` if it has a method `get` with return type `T`, and a method `isEmpty` with a return type that conforms to `Boolean`. +`Option[T]` is an extractor type for type `T`. + +An `unapply` method in an object ´x´ _matches_ the pattern ´x(p_1, ..., p_n)´ if it has a single parameter (and, optionally, an implicit parameter list) and one of the following applies: + +* ´n=0´ and `unapply`'s result type conforms to `Boolean`. +In this case the extractor pattern matches all values ´v´ for which `´x´.unapply(´v´)` yields `true`. +* ´n=1´ and `unapply`'s result type is an extractor type for some type ´T´. +In this case, the (only) argument pattern ´p_1´ is typed in turn with expected type ´T´. +The extractor pattern matches then all values ´v´ for which `´x´.unapply(´v´)` yields a value ´u´ for which `´u´.isEmpty` yields `false`, `´u´.get` yields a value ´v_1´, and ´p_1´ matches ´v_1´. +* ´n>1´ and `unapply`'s result type is an extractor type for some type ´T´ with members ´\_1, ..., \_n´ returning types ´T_1, ..., T_n´. +In this case, the argument patterns ´p_1, ..., p_n´ are typed in turn with expected types ´T_1 , ..., T_n´. +The extractor pattern matches then all values ´v´ for which `´x´.unapply(´v´)` yields a value ´u´ for which `´u´.isEmpty` yields `false`, `´u´.get` yields some value ´t´, and each pattern ´p_i´ matches the corresponding value ´t._1´ from ´t._1, ..., t._n´. + +An `unapplySeq` method in an object ´x´ matches the pattern ´x(q_1, ..., q_m, p_1, ..., p_n)´ if it takes exactly one argument and its result type is of the form `Option[(´T_1, ..., T_m´, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). +This case is further discussed [below](#pattern-sequences). 
+ +###### Example 1 + +If we define an extractor object `Pair`: + +```scala +object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) +} +``` + +This means that the name `Pair` can be used in place of `Tuple2` for tuple formation as well as for deconstruction of tuples in patterns. +Hence, the following is possible: + +```scala +val x = (1, 2) +val y = x match { + case Pair(i, s) => Pair(s + i, i * i) +} +``` + +###### Example 2 + +If we define a class `NameBased` + +```scala +class NameBased[A, B](a: A, b: B) { + def isEmpty = false + def get = this + def _1 = a + def _2 = b +} +``` + +Then `NameBased` is an extractor type for `NameBased` itself, since it has a member `isEmpty` returning a value of type Boolean, and it has a member `get` returning a value of type `NameBased`. + +Since it also has members `_1` and `_2`, it can be used in an extractor pattern with n = 2 as follows: + +```scala +object Extractor { + def unapply(x: Any) = new NameBased(1, "two") +} + +"anything" match { + case Extractor(a, b) => println(s"\$a, \$b") //prints "1, two" +} +``` + + +### Pattern Sequences + +```ebnf +SimplePattern ::= StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’ +``` + +A _pattern sequence_ ´p_1, ..., p_n´ appears in two contexts. +First, in a constructor pattern ´c(q_1, ..., q_m, p_1, ..., p_n)´, where ´c´ is a case class which has ´m+1´ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`. +Second, in an extractor pattern ´x(q_1, ..., q_m, p_1, ..., p_n)´ if the extractor object ´x´ does not have an `unapply` method, but it does define an `unapplySeq` method with a result type that is an extractor type for type `(T_1, ... , T_m, Seq[S])` (if `m = 0`, an extractor type for the type `Seq[S]` is also accepted). The expected type for the patterns ´p_i´ is ´S´. 
+ +The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`. +Each element pattern ´p_i´ is type-checked with ´S´ as expected type, unless it is a sequence wildcard. +If a final sequence wildcard is present, the pattern matches all values ´v´ that are sequences which start with elements matching patterns ´p_1, ..., p_{n-1}´. +If no final sequence wildcard is given, the pattern matches all values ´v´ that are sequences of length ´n´ which consist of elements matching patterns ´p_1, ..., p_n´. + +### Infix Operation Patterns + +```ebnf + Pattern3 ::= SimplePattern {id [nl] SimplePattern} +``` + +An _infix operation pattern_ ´p;\mathit{op};q´ is a shorthand for the +constructor or extractor pattern ´\mathit{op}(p, q)´. +The precedence and associativity of operators in patterns is the same as in [expressions](06-expressions.html#prefix,-infix,-and-postfix-operations). + +An infix operation pattern ´p;\mathit{op};(q_1, ..., q_n)´ is a shorthand for the constructor or extractor pattern ´\mathit{op}(p, q_1, ..., q_n)´. + +### Pattern Alternatives + +```ebnf + Pattern ::= Pattern1 { ‘|’ Pattern1 } +``` + +A _pattern alternative_ `´p_1´ | ... | ´p_n´` consists of a number of alternative patterns ´p_i´. +All alternative patterns are type checked with the expected type of the pattern. +They may not bind variables other than wildcards. +The alternative pattern matches a value ´v´ if at least one its alternatives matches ´v´. + +### XML Patterns + +XML patterns are treated [here](10-xml-expressions-and-patterns.html#xml-patterns). + +### Regular Expression Patterns + +Regular expression patterns have been discontinued in Scala from version 2.0. + +Later version of Scala provide a much simplified version of regular expression patterns that cover most scenarios of non-text sequence processing. 
+A _sequence pattern_ is a pattern that stands in a position where either (1) a pattern of a type `T` which is conforming to `Seq[A]` for some `A` is expected, or (2) a case class constructor that has an iterated formal parameter `A*`. +A wildcard star pattern `_*` in the rightmost position stands for arbitrary long sequences. +It can be bound to variables using `@`, as usual, in which case the variable will have the type `Seq[A]`. + +### Irrefutable Patterns + +A pattern ´p´ is _irrefutable_ for a type ´T´, if one of the following applies: + +1. ´p´ is a variable pattern, +1. ´p´ is a typed pattern ´x: T'´, and ´T <: T'´, +1. ´p´ is a constructor pattern ´c(p_1, ..., p_n)´, the type ´T´ is an instance of class ´c´, the [primary constructor](05-classes-and-objects.html#class-definitions) of type ´T´ has argument types ´T_1, ..., T_n´, and each ´p_i´ is irrefutable for ´T_i´. +1. ´p´ is an extractor pattern for which the extractor type is `Some[´T´]` for some type ´T´ +1. ´p´ is an extractor pattern for which the extractor types `isEmpty` method is the singleton type `false` +1. ´p´ is an extractor pattern for which the return type is the singleton type `true` + +## Type Patterns + +```ebnf + TypePat ::= Type +``` + +Type patterns consist of types, type variables, and wildcards. +A type pattern ´T´ is of one of the following forms: + +* A reference to a class ´C´, ´p.C´, or `´T´#´C´`. +This type pattern matches any non-null instance of the given class. +Note that the prefix of the class, if it exists, is relevant for determining class instances. +For instance, the pattern ´p.C´ matches only instances of classes ´C´ which were created with the path ´p´ as prefix. +This also applies to prefixes which are not given syntactically. +For example, if ´C´ refers to a class defined in the nearest enclosing class and is thus equivalent to ´this.C´, it is considered to have a prefix. 
+ +The bottom types `scala.Nothing` and `scala.Null` cannot be used as type patterns, because they would match nothing in any case. + +* A singleton type `´p´.type`. This type pattern matches only the value denoted by the path ´p´ (the `eq` method is used to compare the matched value to ´p´). + +* A literal type `´lit´`. This type pattern matches only the value denoted by the literal ´lit´ (the `==` method is used to compare the matched value to ´lit´). + +* A compound type pattern `´T_1´ with ... with ´T_n´` where each ´T_i´ is a type pattern. +This type pattern matches all values that are matched by each of the type patterns ´T_i´. + +* A parameterized type pattern ´T[a_1, ..., a_n]´, where the ´a_i´ are type variable patterns or wildcards `_`. +This type pattern matches all values which match ´T´ for some arbitrary instantiation of the type variables and wildcards. +The bounds or alias type of these type variable are determined as described [here](#type-parameter-inference-in-patterns). + +* A parameterized type pattern `scala.Array´[T_1]´`, where ´T_1´ is a type pattern. +This type pattern matches any non-null instance of type `scala.Array´[U_1]´`, where ´U_1´ is a type matched by ´T_1´. + +Types which are not of one of the forms described above are also accepted as type patterns. +However, such type patterns will be translated to their [erasure](03-types.html#type-erasure). +The Scala compiler will issue an "unchecked" warning for these patterns to flag the possible loss of type-safety. + +A _type variable pattern_ is a simple identifier which starts with a lower case letter. + +## Type Parameter Inference in Patterns + +Type parameter inference is the process of finding bounds for the bound type variables in a typed pattern or constructor pattern. +Inference takes into account the expected type of the pattern. + +### Type parameter inference for typed patterns + +Assume a typed pattern ´p: T'´. 
Let ´T´ result from ´T'´ where all wildcards in ´T'´ are renamed to fresh variable names. +Let ´a_1, ..., a_n´ be the type variables in ´T´. +These type variables are considered bound in the pattern. +Let the expected type of the pattern be ´\mathit{pt}´. + +Type parameter inference constructs first a set of subtype constraints over the type variables ´a_i´. +The initial constraints set ´\mathcal{C}\_0´ reflects just the bounds of these type variables. +That is, assuming ´T´ has bound type variables ´a_1, ..., a_n´ which correspond to class type parameters ´a_1', ..., a_n'´ with lower bounds ´L_1, ..., L_n´ and upper bounds ´U_1, ..., U_n´, ´\mathcal{C}_0´ contains the constraints + +$$ +\begin{cases} +a_i &<: \sigma U_i & \quad (i = 1, ..., n) \\\\ +\sigma L_i &<: a_i & \quad (i = 1, ..., n) +\end{cases} +$$ + +where ´\sigma´ is the substitution ´[a_1' := a_1, ..., a_n' :=a_n]´. + +The set ´\mathcal{C}_0´ is then augmented by further subtype constraints. +There are two cases. + +###### Case 1 +If there exists a substitution ´\sigma´ over the type variables ´a_i, ..., a_n´ such that ´\sigma T´ conforms to ´\mathit{pt}´, one determines the weakest subtype constraints ´\mathcal{C}\_1´ over the type variables ´a_1, ..., a_n´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}_1´ implies that ´T´ conforms to ´\mathit{pt}´. + +###### Case 2 +Otherwise, if ´T´ can not be made to conform to ´\mathit{pt}´ by instantiating its type variables, one determines all type variables in ´\mathit{pt}´ which are defined as type parameters of a method enclosing the pattern. +Let the set of such type parameters be ´b_1 , ..., b_m´. +Let ´\mathcal{C}\_0'´ be the subtype constraints reflecting the bounds of the type variables ´b_i´. 
+If ´T´ denotes an instance type of a final class, let ´\mathcal{C}\_2´ be the weakest set of subtype constraints over the type variables ´a_1, ..., a_n´ and ´b_1, ..., b_m´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2´ implies that ´T´ conforms to ´\mathit{pt}´. +If ´T´ does not denote an instance type of a final class, let ´\mathcal{C}\_2´ be the weakest set of subtype constraints over the type variables ´a_1, ..., a_n´ and ´b_1, ..., b_m´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2´ implies that it is possible to construct a type ´T'´ which conforms to both ´T´ and ´\mathit{pt}´. +It is a static error if there is no satisfiable set of constraints ´\mathcal{C}\_2´ with this property. + +The final step consists in choosing type bounds for the type variables which imply the established constraint system. +The process is different for the two cases above. + +###### Case 1 +We take ´a_i >: L_i <: U_i´ where each ´L_i´ is minimal and each ´U_i´ is maximal wrt ´<:´ such that ´a_i >: L_i <: U_i´ for ´i = 1, ..., n´ implies ´\mathcal{C}\_0 \wedge \mathcal{C}\_1´. + +###### Case 2 +We take ´a_i >: L_i <: U_i´ and ´b\_i >: L_i' <: U_i' ´ where each ´L_i´ and ´L_j'´ is minimal and each ´U_i´ and ´U_j'´ is maximal such that ´a_i >: L_i <: U_i´ for ´i = 1, ..., n´ and ´b_j >: L_j' <: U_j'´ for ´j = 1, ..., m´ implies ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}_2´. + +In both cases, local type inference is permitted to limit the complexity of inferred bounds. +Minimality and maximality of types have to be understood relative to the set of types of acceptable complexity. + +### Type parameter inference for constructor patterns +Assume a constructor pattern ´C(p_1, ..., p_n)´ where class ´C´ has type parameters ´a_1, ..., a_n´. +These type parameters are inferred in the same way as for the typed pattern `(_: ´C[a_1, ..., a_n]´)`. 
+ +###### Example +Consider the program fragment: + +```scala +val x: Any +x match { + case y: List[a] => ... +} +``` + +Here, the type pattern `List[a]` is matched against the expected type `Any`. +The pattern binds the type variable `a`. +Since `List[a]` conforms to `Any` for every type argument, there are no constraints on `a`. +Hence, `a` is introduced as an abstract type with no bounds. +The scope of `a` is right-hand side of its case clause. + +On the other hand, if `x` is declared as + +```scala +val x: List[List[String]], +``` + +this generates the constraint `List[a] <: List[List[String]]`, which simplifies to `a <: List[String]`, because `List` is covariant. +Hence, `a` is introduced with upper bound `List[String]`. + +###### Example +Consider the program fragment: + +```scala +val x: Any +x match { + case y: List[String] => ... +} +``` + +Scala does not maintain information about type arguments at run-time, so there is no way to check that `x` is a list of strings. +Instead, the Scala compiler will [erase](03-types.html#type-erasure) the pattern to `List[_]`; that is, it will only test whether the top-level runtime-class of the value `x` conforms to `List`, and the pattern match will succeed if it does. +This might lead to a class cast exception later on, in the case where the list `x` contains elements other than strings. +The Scala compiler will flag this potential loss of type-safety with an "unchecked" warning message. + +###### Example +Consider the program fragment + +```scala +class Term[A] +class Number(val n: Int) extends Term[Int] +def f[B](t: Term[B]): B = t match { + case y: Number => y.n +} +``` + +The expected type of the pattern `y: Number` is `Term[B]`. +The type `Number` does not conform to `Term[B]`; hence Case 2 of the rules above applies. +This means that `B` is treated as another type variable for which subtype constraints are inferred. +In our case the applicable constraint is `Number <: Term[B]`, which entails `B = Int`. 
Hence, `B` is treated in the case clause as an abstract type with lower and upper bound `Int`. +Therefore, the right hand side of the case clause, `y.n`, of type `Int`, is found to conform to the method's declared result type, `Number`. + +## Pattern Matching Expressions + +```ebnf + Expr ::= PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’ + CaseClauses ::= CaseClause {CaseClause} + CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block +``` + +A _pattern matching expression_ + +```scala +e match { case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ } +``` + +consists of a selector expression ´e´ and a number ´n > 0´ of cases. +Each case consists of a (possibly guarded) pattern ´p_i´ and a block ´b_i´. +Each ´p_i´ might be complemented by a guard `if ´e´` where ´e´ is a boolean expression. +The scope of the pattern variables in ´p_i´ comprises the pattern's guard and the corresponding block ´b_i´. + +Let ´T´ be the type of the selector expression ´e´ and let ´a_1, ..., a_m´ be the type parameters of all methods enclosing the pattern matching expression. +For every ´a_i´, let ´L_i´ be its lower bound and ´U_i´ be its higher bound. +Every pattern ´p \in \{p_1,, ..., p_n\}´ can be typed in two ways. +First, it is attempted to type ´p´ with ´T´ as its expected type. +If this fails, ´p´ is instead typed with a modified expected type ´T'´ which results from ´T´ by replacing every occurrence of a type parameter ´a_i´ by +*undefined*. +If this second step fails also, a compile-time error results. +If the second step succeeds, let ´T_p´ be the type of pattern ´p´ seen as an expression. +One then determines minimal bounds ´L_11, ..., L_m'´ and maximal bounds ´U_1', ..., U_m'´ such that for all ´i´, ´L_i <: L_i'´ and ´U_i' <: U_i´ and the following constraint system is satisfied: + +$$ +L_1 <: a_1 <: U_1\;\wedge\;...\;\wedge\;L_m <: a_m <: U_m \ \Rightarrow\ T_p <: T +$$ + +If no such bounds can be found, a compile time error results. 
+If such bounds are found, the pattern matching clause starting with ´p´ is then typed under the assumption that each ´a_i´ has lower bound ´L_i'´ instead of ´L_i´ and has upper bound ´U_i'´ instead of ´U_i´. + +The expected type of every block ´b_i´ is the expected type of the whole pattern matching expression. +The type of the pattern matching expression is then the [weak least upper bound](03-types.html#weak-conformance) of the types of all blocks ´b_i´. + +When applying a pattern matching expression to a selector value, patterns are tried in sequence until one is found which matches the [selector value](#patterns). +Say this case is `case ´p_i \Rightarrow b_i´`. +The result of the whole expression is the result of evaluating ´b_i´, where all pattern variables of ´p_i´ are bound to the corresponding parts of the selector value. +If no matching pattern is found, a `scala.MatchError` exception is thrown. + +The pattern in a case may also be followed by a guard suffix `if e` with a boolean expression ´e´. +The guard expression is evaluated if the preceding pattern in the case matches. +If the guard expression evaluates to `true`, the pattern match succeeds as normal. +If the guard expression evaluates to `false`, the pattern in the case is considered not to match and the search for a matching pattern continues. + +In the interest of efficiency the evaluation of a pattern matching expression may try patterns in some other order than textual sequence. +This might affect evaluation through side effects in guards. +However, it is guaranteed that a guard expression is evaluated only if the pattern it guards matches. + +If the selector of a pattern match is an instance of a [`sealed` class](05-classes-and-objects.html#modifiers), the compilation of pattern matching can emit warnings which diagnose that a given set of patterns is not exhaustive, i.e. that there is a possibility of a `MatchError` being raised at run-time. 
+ +###### Example + +Consider the following definitions of arithmetic terms: + +```scala +abstract class Term[T] +case class Lit(x: Int) extends Term[Int] +case class Succ(t: Term[Int]) extends Term[Int] +case class IsZero(t: Term[Int]) extends Term[Boolean] +case class If[T](c: Term[Boolean], + t1: Term[T], + t2: Term[T]) extends Term[T] +``` + +There are terms to represent numeric literals, incrementation, a zero test, and a conditional. +Every term carries as a type parameter the type of the expression it represents (either `Int` or `Boolean`). + +A type-safe evaluator for such terms can be written as follows. + +```scala +def eval[T](t: Term[T]): T = t match { + case Lit(n) => n + case Succ(u) => eval(u) + 1 + case IsZero(u) => eval(u) == 0 + case If(c, u1, u2) => eval(if (eval(c)) u1 else u2) +} +``` + +Note that the evaluator makes crucial use of the fact that type parameters of enclosing methods can acquire new bounds through pattern matching. + +For instance, the type of the pattern in the second case, `Succ(u)`, is `Int`. +It conforms to the selector type `T` only if we assume an upper and lower bound of `Int` for `T`. +Under the assumption `Int <: T <: Int` we can also verify that the type right hand side of the second case, `Int` conforms to its expected type, `T`. + +## Pattern Matching Anonymous Functions + +```ebnf + BlockExpr ::= ‘{’ CaseClauses ‘}’ +``` + +An anonymous function can be defined by a sequence of cases + +```scala +{ case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ } +``` + +which appear as an expression without a prior `match`. +The expected type of such an expression must in part be defined. +It must be either `scala.Function´k´[´S_1, ..., S_k´, ´R´]` for some ´k > 0´, or `scala.PartialFunction[´S_1´, ´R´]`, where the argument type(s) ´S_1, ..., S_k´ must be fully determined, but the result type ´R´ may be undetermined. 
+ +If the expected type is [SAM-convertible](06-expressions.html#sam-conversion) to `scala.Function´k´[´S_1, ..., S_k´, ´R´]`, the expression is taken to be equivalent to the anonymous function: + +```scala +(´x_1: S_1, ..., x_k: S_k´) => (´x_1, ..., x_k´) match { + case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ +} +``` + +Here, each ´x_i´ is a fresh name. +As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn equivalent to the following instance creation expression, where ´T´ is the weak least upper bound of the types of all ´b_i´. + +```scala +new scala.Function´k´[´S_1, ..., S_k´, ´T´] { + def apply(´x_1: S_1, ..., x_k: S_k´): ´T´ = (´x_1, ..., x_k´) match { + case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ + } +} +``` + +If the expected type is `scala.PartialFunction[´S´, ´R´]`, the expression is taken to be equivalent to the following instance creation expression: + +```scala +new scala.PartialFunction[´S´, ´T´] { + def apply(´x´: ´S´): ´T´ = x match { + case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ + } + def isDefinedAt(´x´: ´S´): Boolean = { + case ´p_1´ => true ... case ´p_n´ => true + case _ => false + } +} +``` + +Here, ´x´ is a fresh name and ´T´ is the weak least upper bound of the types of all ´b_i´. +The final default case in the `isDefinedAt` method is omitted if one of the patterns ´p_1, ..., p_n´ is already a variable or wildcard pattern. 
+ +###### Example +Here's an example which uses `foldLeft` to compute the scalar product of two vectors: + +```scala +def scalarProduct(xs: Array[Double], ys: Array[Double]) = + (xs zip ys).foldLeft(0.0) { + case (a, (b, c)) => a + b * c + } +``` + +The case clauses in this code are equivalent to the following anonymous function: + +```scala +(x, y) => (x, y) match { + case (a, (b, c)) => a + b * c +} +``` diff --git a/docs/_spec/09-top-level-definitions.md b/docs/_spec/09-top-level-definitions.md new file mode 100644 index 000000000000..8406c0180533 --- /dev/null +++ b/docs/_spec/09-top-level-definitions.md @@ -0,0 +1,178 @@ +--- +title: Top-Level Definitions +layout: default +chapter: 9 +--- + +# Top-Level Definitions + +## Compilation Units + +```ebnf +CompilationUnit ::= {‘package’ QualId semi} TopStatSeq +TopStatSeq ::= TopStat {semi TopStat} +TopStat ::= {Annotation} {Modifier} TmplDef + | Import + | Packaging + | PackageObject + | +QualId ::= id {‘.’ id} +``` + +A compilation unit consists of a sequence of packagings, import clauses, and class and object definitions, which may be preceded by a package clause. + +A _compilation unit_ + +```scala +package ´p_1´; +... +package ´p_n´; +´\mathit{stats}´ +``` + +starting with one or more package clauses is equivalent to a compilation unit consisting of the packaging + +```scala +package ´p_1´ { ... + package ´p_n´ { + ´\mathit{stats}´ + } ... +} +``` + +Every compilation unit implicitly imports the following packages, in the given order: + 1. the package `java.lang`, + 2. the package `scala`, and + 3. the object [`scala.Predef`](12-the-scala-standard-library.html#the-predef-object), unless there is an explicit top-level import that references `scala.Predef`. + +Members of a later import in that order hide members of an earlier import. + +The exception to the implicit import of `scala.Predef` can be useful to hide, e.g., predefined implicit conversions. 
+ +## Packagings + +```ebnf +Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’ +``` + +A _package_ is a special object which defines a set of member classes, objects and packages. +Unlike other objects, packages are not introduced by a definition. +Instead, the set of members of a package is determined by packagings. + +A packaging `package ´p´ { ´\mathit{ds}´ }` injects all definitions in ´\mathit{ds}´ as members into the package whose qualified name is ´p´. +Members of a package are called _top-level_ definitions. +If a definition in ´\mathit{ds}´ is labeled `private`, it is visible only for other members in the package. + +Inside the packaging, all members of package ´p´ are visible under their simple names. +However this rule does not extend to members of enclosing packages of ´p´ that are designated by a prefix of the path ´p´. + +```scala +package org.net.prj { + ... +} +``` + +all members of package `org.net.prj` are visible under their simple names, but members of packages `org` or `org.net` require explicit qualification or imports. + +Selections ´p´.´m´ from ´p´ as well as imports from ´p´ work as for objects. +However, unlike other objects, packages may not be used as values. +It is illegal to have a package with the same fully qualified name as a module or a class. + +Top-level definitions outside a packaging are assumed to be injected into a special empty package. +That package cannot be named and therefore cannot be imported. +However, members of the empty package are visible to each other without qualification. + +## Package Objects + +```ebnf +PackageObject ::= ‘package’ ‘object’ ObjectDef +``` + +A _package object_ `package object ´p´ extends ´t´` adds the members of template ´t´ to the package ´p´. +There can be only one package object per package. +The standard naming convention is to place the definition above in a file named `package.scala` that's located in the directory corresponding to package ´p´. 
+ +The package object should not define a member with the same name as one of the top-level objects or classes defined in package ´p´. +If there is a name conflict, the behavior of the program is currently undefined. +It is expected that this restriction will be lifted in a future version of Scala. + +## Package References + +```ebnf +QualId ::= id {‘.’ id} +``` + +A reference to a package takes the form of a qualified identifier. +Like all other references, package references are relative. +That is, a package reference starting in a name ´p´ will be looked up in the closest enclosing scope that defines a member named ´p´. + +If a package name is shadowed, it's possible to refer to its fully-qualified name by prefixing it with the special predefined name `_root_`, which refers to the outermost root package that contains all top-level packages. + +The name `_root_` has this special denotation only when used as the first element of a qualifier; it is an ordinary identifier otherwise. + +###### Example +Consider the following program: + +```scala +package b { + class B +} + +package a { + package b { + class A { + val x = new _root_.b.B + } + class C { + import _root_.b._ + def y = new B + } + } +} + +``` + +Here, the reference `_root_.b.B` refers to class `B` in the toplevel package `b`. +If the `_root_` prefix had been omitted, the name `b` would instead resolve to the package `a.b`, and, provided that package does not also contain a class `B`, a compiler-time error would result. + +## Programs + +A _program_ is a top-level object that has a member method _main_ of type `(Array[String])Unit`. Programs can be executed from a command shell. +The program's command arguments are passed to the `main` method as a parameter of type `Array[String]`. + +The `main` method of a program can be directly defined in the object, or it can be inherited. +The scala library defines a special class `scala.App` whose body acts as a `main` method. 
+An object ´m´ inheriting from this class is thus a program, which executes the initialization code of the object ´m´. + +###### Example +The following example will create a hello world program by defining a method `main` in module `test.HelloWorld`. + +```scala +package test +object HelloWorld { + def main(args: Array[String]) { println("Hello World") } +} +``` + +This program can be started by the command + +```scala +scala test.HelloWorld +``` + +In a Java environment, the command + +```scala +java test.HelloWorld +``` + +would work as well. + +`HelloWorld` can also be defined without a `main` method by inheriting from `App` instead: + +```scala +package test +object HelloWorld extends App { + println("Hello World") +} +``` diff --git a/docs/_spec/10-xml-expressions-and-patterns.md b/docs/_spec/10-xml-expressions-and-patterns.md new file mode 100644 index 000000000000..c929e24fe93d --- /dev/null +++ b/docs/_spec/10-xml-expressions-and-patterns.md @@ -0,0 +1,124 @@ +--- +title: XML +layout: default +chapter: 10 +--- + +# XML Expressions and Patterns + +__By Burak Emir__ + +This chapter describes the syntactic structure of XML expressions and patterns. +It follows as closely as possible the XML 1.0 specification, changes being mandated by the possibility of embedding Scala code fragments. + +## XML expressions + +XML expressions are expressions generated by the following production, where the opening bracket `<` of the first element must be in a position to start the lexical [XML mode](01-lexical-syntax.html#xml-mode). + +```ebnf +XmlExpr ::= XmlContent {Element} +``` + +Well-formedness constraints of the XML specification apply, which means for instance that start tags and end tags must match, and attributes may only be defined once, except for constraints related to entity resolution. + +The following productions describe Scala's extensible markup language, designed as close as possible to the W3C extensible markup language standard. 
+Only the productions for attribute values and character data are changed. +Scala does not support declarations. +Entity references are not resolved at runtime. + +```ebnf +Element ::= EmptyElemTag + | STag Content ETag + +EmptyElemTag ::= ‘<’ Name {S Attribute} [S] ‘/>’ + +STag ::= ‘<’ Name {S Attribute} [S] ‘>’ +ETag ::= ‘</’ Name [S] ‘>’ +Content ::= [CharData] {Content1 [CharData]} +Content1 ::= XmlContent + | Reference + | ScalaExpr +XmlContent ::= Element + | CDSect + | PI + | Comment +``` + +If an XML expression is a single element, its value is a runtime representation of an XML node (an instance of a subclass of `scala.xml.Node`). +If the XML expression consists of more than one element, then its value is a runtime representation of a sequence of XML nodes (an instance of a subclass of `scala.Seq[scala.xml.Node]`). + +If an XML expression is an entity reference, CDATA section, processing instruction, or a comment, it is represented by an instance of the corresponding Scala runtime class. + +By default, beginning and trailing whitespace in element content is removed, and consecutive occurrences of whitespace are replaced by a single space character `\u0020`. +This behavior can be changed to preserve all whitespace with a compiler option. + +```ebnf +Attribute ::= Name Eq AttValue + +AttValue ::= ‘"’ {CharQ | CharRef} ‘"’ + | ‘'’ {CharA | CharRef} ‘'’ + | ScalaExpr + +ScalaExpr ::= Block + +CharData ::= { CharNoRef } ´\textit{ without}´ {CharNoRef}‘{’CharB {CharNoRef} + ´\textit{ and without}´ {CharNoRef}‘]]>’{CharNoRef} +``` + + +XML expressions may contain Scala expressions as attribute values or within nodes. +In the latter case, these are embedded using a single opening brace `{` and ended by a closing brace `}`. +To express a single opening brace within XML text as generated by CharData, it must be doubled. +Thus, `{{` represents the XML text `{` and does not introduce an embedded Scala expression. 
+ + +```ebnf +BaseChar, CDSect, Char, Comment, CombiningChar, Ideographic, NameChar, PI, S, Reference + ::= ´\textit{“as in W3C XML”}´ + +Char1 ::= Char ´\textit{ without}´ ‘<’ | ‘&’ +CharQ ::= Char1 ´\textit{ without}´ ‘"’ +CharA ::= Char1 ´\textit{ without}´ ‘'’ +CharB ::= Char1 ´\textit{ without}´ ‘{’ + +Name ::= XNameStart {NameChar} + +XNameStart ::= ‘_’ | BaseChar | Ideographic + ´\textit{ (as in W3C XML, but without }´ ‘:’´)´ +``` + +## XML patterns + +XML patterns are patterns generated by the following production, where the opening bracket `<` of the element patterns must be in a position to start the lexical [XML mode](01-lexical-syntax.html#xml-mode). + +```ebnf +XmlPattern ::= ElementPattern +``` + +Well-formedness constraints of the XML specification apply. + +An XML pattern has to be a single element pattern. +It matches exactly those runtime representations of an XML tree that have the same structure as described by the pattern. +XML patterns may contain [Scala patterns](08-pattern-matching.html#pattern-matching-expressions). + +Whitespace is treated the same way as in XML expressions. + +By default, beginning and trailing whitespace in element content is removed, and consecutive occurrences of whitespace are replaced by a single space character `\u0020`. +This behavior can be changed to preserve all whitespace with a compiler option. 
+ +```ebnf +ElemPattern ::= EmptyElemTagP + | STagP ContentP ETagP + +EmptyElemTagP ::= ‘<’ Name [S] ‘/>’ +STagP ::= ‘<’ Name [S] ‘>’ +ETagP ::= ‘</’ Name [S] ‘>’ +ContentP ::= [CharData] {(ElemPattern|ScalaPatterns) [CharData]} +ContentP1 ::= ElemPattern + | Reference + | CDSect + | PI + | Comment + | ScalaPatterns +ScalaPatterns ::= ‘{’ Patterns ‘}’ +``` diff --git a/docs/_spec/11-annotations.md b/docs/_spec/11-annotations.md new file mode 100644 index 000000000000..11325a1639f0 --- /dev/null +++ b/docs/_spec/11-annotations.md @@ -0,0 +1,126 @@ +--- +title: Annotations +layout: default +chapter: 11 +--- + +# Annotations + +```ebnf + Annotation ::= ‘@’ SimpleType {ArgumentExprs} + ConstrAnnotation ::= ‘@’ SimpleType ArgumentExprs +``` + +## Definition + +Annotations associate meta-information with definitions. +A simple annotation has the form `@´c´` or `@´c(a_1, ..., a_n)´`. +Here, ´c´ is a constructor of a class ´C´, which must conform to the class `scala.Annotation`. + +Annotations may apply to definitions or declarations, types, or expressions. +An annotation of a definition or declaration appears in front of that definition. +An annotation of a type appears after that type. +An annotation of an expression ´e´ appears after the expression ´e´, separated by a colon. +More than one annotation clause may apply to an entity. +The order in which these annotations are given does not matter. + +Examples: + +```scala +@deprecated("Use D", "1.0") class C { ... } // Class annotation +@transient @volatile var m: Int // Variable annotation +String @local // Type annotation +(e: @unchecked) match { ... } // Expression annotation +``` + +## Predefined Annotations + +### Java Platform Annotations + +The meaning of annotation clauses is implementation-dependent. +On the Java platform, the following annotations have a standard meaning. + +* `@transient` Marks a field to be non-persistent; this is equivalent to the `transient` modifier in Java. 
+ +* `@volatile` Marks a field which can change its value outside the control of the program; this is equivalent to the `volatile` modifier in Java. + +* `@SerialVersionUID(<longlit>)` Attaches a serial version identifier (a `long` constant) to a class. +This is equivalent to the following field definition in Java: + +```java +private final static SerialVersionUID = <longlit> +``` + +* `@throws(<classlit>)` A Java compiler checks that a program contains handlers for checked exceptions by analyzing which checked exceptions can result from the execution of a method or constructor. +For each checked exception which is a possible result, the `throws` clause for the method or constructor must mention the class of that exception or one of the superclasses of the class of that exception. + +### Java Beans Annotations + +* `@scala.beans.BeanProperty` When prefixed to a definition of some variable `X`, this annotation causes getter and setter methods `getX`, `setX` in the Java bean style to be added in the class containing the variable. +The first letter of the variable appears capitalized after the `get` or `set`. +When the annotation is added to the definition of an immutable value definition `X`, only a getter is generated. +The construction of these methods is part of code-generation; therefore, these methods become visible only once a classfile for the containing class is generated. + +* `@scala.beans.BooleanBeanProperty` This annotation is equivalent to `scala.reflect.BeanProperty`, but the generated getter method is named `isX` instead of `getX`. + +### Deprecation Annotations + +* `@deprecated(message: <stringlit>, since: <stringlit>)`
+Marks a definition as deprecated. +Accesses to the defined entity will then cause a deprecated warning mentioning the _message_ `<stringlit>` to be issued from the compiler. +The argument _since_ documents since when the definition should be considered deprecated.
+Deprecated warnings are suppressed in code that belongs itself to a definition that is labeled deprecated. + +* `@deprecatedName(name: <symbollit>, since: <stringlit>)`
+Marks a formal parameter name as deprecated. +Invocations of this entity using named parameter syntax referring to the deprecated parameter name cause a deprecation warning. + +### Scala Compiler Annotations + +* `@unchecked` When applied to the selector of a `match` expression, this attribute suppresses any warnings about non-exhaustive pattern matches that would otherwise be emitted. +For instance, no warnings would be produced for the method definition below. +```scala +def f(x: Option[Int]) = (x: @unchecked) match { + case Some(y) => y +} +``` +Without the `@unchecked` annotation, a Scala compiler could infer that the pattern match is non-exhaustive, and could produce a warning because `Option` is a `sealed` class. + +* `@uncheckedStable` When applied a value declaration or definition, it allows the defined value to appear in a path, even if its type is [volatile](03-types.html#volatile-types). +For instance, the following member definitions are legal: +```scala +type A { type T } +type B +@uncheckedStable val x: A with B // volatile type +val y: x.T // OK since `x' is still a path +``` +Without the `@uncheckedStable` annotation, the designator `x` would not be a path since its type `A with B` is volatile. +Hence, the reference `x.T` would be malformed. + +When applied to value declarations or definitions that have non-volatile types, the annotation has no effect. + +* `@specialized` When applied to the definition of a type parameter, this annotation causes the compiler to generate specialized definitions for primitive types. +An optional list of primitive types may be given, in which case specialization takes into account only those types. 
+For instance, the following code would generate specialized traits for `Unit`, `Int` and `Double` +```scala +trait Function0[@specialized(Unit, Int, Double) T] { + def apply: T +} +``` +Whenever the static type of an expression matches a specialized variant of a definition, the compiler will instead use the specialized version. +See the [specialization sid](https://docs.scala-lang.org/sips/scala-specialization.html) for more details of the implementation. + + +## User-defined Annotations + +Other annotations may be interpreted by platform- or application-dependent tools. +The class `scala.annotation.Annotation` is the base class for user-defined annotations. It has two sub-traits: +- `scala.annotation.StaticAnnotation`: Instances of a subclass of this trait will be stored in the generated class files, and therefore accessible to runtime reflection and later compilation runs. +- `scala.annotation.ConstantAnnotation`: Instances of a subclass of this trait may only have arguments which are [constant expressions](06-expressions.html#constant-expressions), and are also stored in the generated class files. +- If an annotation class inherits from neither `scala.ConstantAnnotation` nor `scala.StaticAnnotation`, its instances are visible only locally during the compilation run that analyzes them. + +## Host-platform Annotations + +The host platform may define its own annotation format. +These annotations do not extend any of the classes in the `scala.annotation` package, but can generally be used in the same way as Scala annotations. +The host platform may impose additional restrictions on the expressions which are valid as annotation arguments. 
diff --git a/docs/_spec/12-the-scala-standard-library.md b/docs/_spec/12-the-scala-standard-library.md new file mode 100644 index 000000000000..401735286a51 --- /dev/null +++ b/docs/_spec/12-the-scala-standard-library.md @@ -0,0 +1,701 @@ +--- +title: Standard Library +layout: default +chapter: 12 +--- + +# The Scala Standard Library + +The Scala standard library consists of the package `scala` with a number of classes and modules. +Some of these classes are described in the following. + +![Class hierarchy of Scala](public/images/classhierarchy.png) + +## Root Classes + +The root of this hierarchy is formed by class `Any`. +Every class in a Scala execution environment inherits directly or indirectly from this class. +Class `Any` has two direct subclasses: `AnyRef` and `AnyVal`. + +The subclass `AnyRef` represents all values which are represented as objects in the underlying host system. +Classes written in other languages inherit from `scala.AnyRef`. + +The predefined subclasses of class `AnyVal` describe values which are not implemented as objects in the underlying host system. + +User-defined Scala classes which do not explicitly inherit from `AnyVal` inherit directly or indirectly from `AnyRef`. +They cannot inherit from both `AnyRef` and `AnyVal`. + +Classes `AnyRef` and `AnyVal` are required to provide only the members declared in class `Any`, but implementations may add host-specific methods to these classes (for instance, an implementation may identify class `AnyRef` with its own root class for objects). + +The signatures of these root classes are described by the following definitions. 
+ +```scala +package scala +/** The universal root class */ +abstract class Any { + + /** Defined equality; abstract here */ + def equals(that: Any): Boolean + + /** Semantic equality between values */ + final def == (that: Any): Boolean = + if (null eq this) null eq that else this equals that + + /** Semantic inequality between values */ + final def != (that: Any): Boolean = !(this == that) + + /** Hash code; abstract here */ + def hashCode: Int = ... + + /** Textual representation; abstract here */ + def toString: String = ... + + /** Type test; needs to be inlined to work as given */ + def isInstanceOf[a]: Boolean + + /** Type cast; needs to be inlined to work as given */ + def asInstanceOf[A]: A = this match { + case x: A => x + case _ => if (this eq null) this + else throw new ClassCastException() + } +} + +/** The root class of all value types */ +final class AnyVal extends Any + +/** The root class of all reference types */ +class AnyRef extends Any { + def equals(that: Any): Boolean = this eq that + final def eq(that: AnyRef): Boolean = ... // reference equality + final def ne(that: AnyRef): Boolean = !(this eq that) + + def hashCode: Int = ... // hashCode computed from allocation address + def toString: String = ... // toString computed from hashCode and class name + + def synchronized[T](body: => T): T // execute `body` while locking `this`. +} +``` + +The type test `´x´.isInstanceOf[´T´]` is equivalent to a typed pattern match + +```scala +´x´ match { + case _: ´T'´ => true + case _ => false +} +``` + +where the type ´T'´ is the same as ´T´ except if ´T´ is of the form ´D´ or ´D[\mathit{tps}]´ where ´D´ is a type member of some outer class ´C´. +In this case ´T'´ is `´C´#´D´` (or `´C´#´D[tps]´`, respectively), whereas ´T´ itself would expand to `´C´.this.´D[tps]´`. +In other words, an `isInstanceOf` test does not check that types have the same enclosing instance. 
+ +The test `´x´.asInstanceOf[´T´]` is treated specially if ´T´ is a [numeric value type](#value-classes). +In this case the cast will be translated to an application of a [conversion method](#numeric-value-types) `x.to´T´`. +For non-numeric values ´x´ the operation will raise a `ClassCastException`. + +## Value Classes + +Value classes are classes whose instances are not represented as objects by the underlying host system. +All value classes inherit from class `AnyVal`. +Scala implementations need to provide the value classes `Unit`, `Boolean`, `Double`, `Float`, `Long`, `Int`, `Char`, `Short`, and `Byte` (but are free to provide others as well). +The signatures of these classes are defined in the following. + +### Numeric Value Types + +Classes `Double`, `Float`, `Long`, `Int`, `Char`, `Short`, and `Byte` are together called _numeric value types_. +Classes `Byte`, `Short`, or `Char` are called _subrange types_. +Subrange types, as well as `Int` and `Long` are called _integer types_, whereas `Float` and `Double` are called _floating point types_. + +Numeric value types are ranked in the following partial order: + +```scala +Byte - Short + \ + Int - Long - Float - Double + / + Char +``` + +`Byte` and `Short` are the lowest-ranked types in this order, whereas `Double` is the highest-ranked. +Ranking does _not_ +imply a [conformance relationship](03-types.html#conformance); for instance `Int` is not a subtype of `Long`. +However, object [`Predef`](#the-predef-object) defines [views](07-implicits.html#views) from every numeric value type to all higher-ranked numeric value types. +Therefore, lower-ranked types are implicitly converted to higher-ranked types when required by the [context](06-expressions.html#implicit-conversions). + +Given two numeric value types ´S´ and ´T´, the _operation type_ of ´S´ and ´T´ is defined as follows: If both ´S´ and ´T´ are subrange types then the operation type of ´S´ and ´T´ is `Int`. 
+Otherwise the operation type of ´S´ and ´T´ is the larger of the two types wrt +ranking. +Given two numeric values ´v´ and ´w´ the operation type of ´v´ and ´w´ is the operation type of their run-time types. + +Any numeric value type ´T´ supports the following methods. + +* Comparison methods for equals (`==`), not-equals (`!=`), less-than (`<`), greater-than (`>`), less-than-or-equals (`<=`), greater-than-or-equals (`>=`), which each exist in 7 overloaded alternatives. +Each alternative takes a parameter of some numeric value type. +Its result type is type `Boolean`. +The operation is evaluated by converting the receiver and its argument to their operation type and performing the given comparison operation of that type. +* Arithmetic methods addition (`+`), subtraction (`-`), multiplication (`*`), division (`/`), and remainder (`%`), which each exist in 7 overloaded alternatives. +Each alternative takes a parameter of some numeric value type ´U´. +Its result type is the operation type of ´T´ and ´U´. +The operation is evaluated by converting the receiver and its argument to their operation type and performing the given arithmetic operation of that type. +* Parameterless arithmetic methods identity (`+`) and negation (`-`), with result type ´T´. +The first of these returns the receiver unchanged, whereas the second returns its negation. +* Conversion methods `toByte`, `toShort`, `toChar`, `toInt`, `toLong`, `toFloat`, `toDouble` which convert the receiver object to the target type, using the rules of Java's numeric type cast operation. +The conversion might truncate the numeric value (as when going from `Long` to `Int` or from `Int` to `Byte`) or it might lose precision (as when going from `Double` to `Float` or when converting between `Long` and `Float`). 
+ +Integer numeric value types support in addition the following operations: + +* Bit manipulation methods bitwise-and (`&`), bitwise-or {`|`}, and bitwise-exclusive-or (`^`), which each exist in 5 overloaded alternatives. +Each alternative takes a parameter of some integer numeric value type. +Its result type is the operation type of ´T´ and ´U´. +The operation is evaluated by converting the receiver and its argument to their operation type and performing the given bitwise operation of that type. + +* A parameterless bit-negation method (`~`). +Its result type is the receiver type ´T´ or `Int`, whichever is larger. +The operation is evaluated by converting the receiver to the result type and negating every bit in its value. +* Bit-shift methods left-shift (`<<`), arithmetic right-shift (`>>`), and unsigned right-shift (`>>>`). +Each of these methods has two overloaded alternatives, which take a parameter ´n´ of type `Int`, respectively `Long`. +The result type of the operation is the receiver type ´T´, or `Int`, whichever is larger. +The operation is evaluated by converting the receiver to the result type and performing the specified shift by ´n´ bits. + +Numeric value types also implement operations `equals`, `hashCode`, and `toString` from class `Any`. + +The `equals` method tests whether the argument is a numeric value type. +If this is true, it will perform the `==` operation which is appropriate for that type. +That is, the `equals` method of a numeric value type can be thought of being defined as follows: + +```scala +def equals(other: Any): Boolean = other match { + case that: Byte => this == that + case that: Short => this == that + case that: Char => this == that + case that: Int => this == that + case that: Long => this == that + case that: Float => this == that + case that: Double => this == that + case _ => false +} +``` + +The `hashCode` method returns an integer hashcode that maps equal numeric values to equal results. 
+It is guaranteed to be the identity for type `Int` and for all subrange types. + +The `toString` method displays its receiver as an integer or floating point number. + +###### Example + +This is the signature of the numeric value type `Int`: + +```scala +package scala +abstract sealed class Int extends AnyVal { + def == (that: Double): Boolean // double equality + def == (that: Float): Boolean // float equality + def == (that: Long): Boolean // long equality + def == (that: Int): Boolean // int equality + def == (that: Short): Boolean // int equality + def == (that: Byte): Boolean // int equality + def == (that: Char): Boolean // int equality + /* analogous for !=, <, >, <=, >= */ + + def + (that: Double): Double // double addition + def + (that: Float): Double // float addition + def + (that: Long): Long // long addition + def + (that: Int): Int // int addition + def + (that: Short): Int // int addition + def + (that: Byte): Int // int addition + def + (that: Char): Int // int addition + /* analogous for -, *, /, % */ + + def & (that: Long): Long // long bitwise and + def & (that: Int): Int // int bitwise and + def & (that: Short): Int // int bitwise and + def & (that: Byte): Int // int bitwise and + def & (that: Char): Int // int bitwise and + /* analogous for |, ^ */ + + def << (cnt: Int): Int // int left shift + def << (cnt: Long): Int // long left shift + /* analogous for >>, >>> */ + + def unary_+ : Int // int identity + def unary_- : Int // int negation + def unary_~ : Int // int bitwise negation + + def toByte: Byte // convert to Byte + def toShort: Short // convert to Short + def toChar: Char // convert to Char + def toInt: Int // convert to Int + def toLong: Long // convert to Long + def toFloat: Float // convert to Float + def toDouble: Double // convert to Double +} +``` + +### Class `Boolean` + +Class `Boolean` has only two values: `true` and `false`. +It implements operations as given in the following class definition. 
+ +```scala +package scala +abstract sealed class Boolean extends AnyVal { + def && (p: => Boolean): Boolean = // boolean and + if (this) p else false + def || (p: => Boolean): Boolean = // boolean or + if (this) true else p + def & (x: Boolean): Boolean = // boolean strict and + if (this) x else false + def | (x: Boolean): Boolean = // boolean strict or + if (this) true else x + def == (x: Boolean): Boolean = // boolean equality + if (this) x else x.unary_! + def != (x: Boolean): Boolean = // boolean inequality + if (this) x.unary_! else x + def unary_!: Boolean = // boolean negation + if (this) false else true +} +``` + +The class also implements operations `equals`, `hashCode`, and `toString` from class `Any`. + +The `equals` method returns `true` if the argument is the same boolean value as the receiver, `false` otherwise. +The `hashCode` method returns a fixed, implementation-specific hash-code when invoked on `true`, and a different, fixed, implementation-specific hash-code when invoked on `false`. +The `toString` method returns the receiver converted to a string, i.e. either `"true"` or `"false"`. + +### Class `Unit` + +Class `Unit` has only one value: `()`. +It implements only the three methods `equals`, `hashCode`, and `toString` from class `Any`. + +The `equals` method returns `true` if the argument is the unit value `()`, `false` otherwise. +The `hashCode` method returns a fixed, implementation-specific hash-code. +The `toString` method returns `"()"`. + +## Standard Reference Classes + +This section presents some standard Scala reference classes which are treated in a special way by the Scala compiler – either Scala provides syntactic sugar for them, or the Scala compiler generates special code for their operations. +Other classes in the standard Scala library are documented in the Scala library documentation by HTML pages. 
+ +### Class `String` + +Scala's `String` class is usually derived from the standard String class of the underlying host system (and may be identified with it). +For Scala clients the class is taken to support in each case a method + +```scala +def + (that: Any): String +``` + +which concatenates its left operand with the textual representation of its right operand. + +### The `Tuple` classes + +Scala defines tuple classes `Tuple´n´` for ´n = 2, ..., 22´. +These are defined as follows. + +```scala +package scala +case class Tuple´n´[+T_1, ..., +T_n](_1: T_1, ..., _´n´: T_´n´) { + def toString = "(" ++ _1 ++ "," ++ ... ++ "," ++ _´n´ ++ ")" +} +``` + +### The `Function` Classes + +Scala defines function classes `Function´n´` for ´n = 1 , \ldots , 22´. +These are defined as follows. + +```scala +package scala +trait Function´n´[-T_1, ..., -T_´n´, +R] { + def apply(x_1: T_1, ..., x_´n´: T_´n´): R + def toString = "<function>" +} +``` + +The `PartialFunction` subclass of `Function1` represents functions that (indirectly) specify their domain. +Use the `isDefined` method to query whether the partial function is defined for a given input (i.e., whether the input is part of the function's domain). + +```scala +class PartialFunction[-A, +B] extends Function1[A, B] { + def isDefinedAt(x: A): Boolean +} +``` + +The implicitly imported [`Predef`](#the-predef-object) object defines the name `Function` as an alias of `Function1`. + +### Class `Array` + +All operations on arrays desugar to the corresponding operations of the underlying platform. +Therefore, the following class definition is given for informational purposes only: + +```scala +final class Array[T](_length: Int) +extends java.io.Serializable with java.lang.Cloneable { + def length: Int = ... + def apply(i: Int): T = ... + def update(i: Int, x: T): Unit = ... + override def clone(): Array[T] = ... 
+} +``` + +If ´T´ is not a type parameter or abstract type, the type `Array[T]` is represented as the array type `|T|[]` in the underlying host system, where `|T|` is the erasure of `T`. +If ´T´ is a type parameter or abstract type, a different representation might be used (it is `Object` on the Java platform). + +#### Operations + +`length` returns the length of the array, `apply` means subscripting, and `update` means element update. + +Because of the syntactic sugar for `apply` and `update` operations, we have the following correspondences between Scala and Java code for operations on an array `xs`: + +|_Scala_ |_Java_ | +|------------------|------------| +|`xs.length` |`xs.length` | +|`xs(i)` |`xs[i]` | +|`xs(i) = e` |`xs[i] = e` | + +Two implicit conversions exist in `Predef` that are frequently applied to arrays: a conversion to `scala.collection.mutable.ArrayOps` and a conversion to `scala.collection.mutable.ArraySeq` (a subtype of `scala.collection.Seq`). + +Both types make many of the standard operations found in the Scala collections API available. +The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return a value of type `Array`, while the conversion to `ArraySeq` is permanent as all operations return a value of type `ArraySeq`. +The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. + +Because of the tension between parametrized types in Scala and the ad-hoc implementation of arrays in the host-languages, some subtle points need to be taken into account when dealing with arrays. +These are explained in the following. + +#### Variance + +Unlike arrays in Java, arrays in Scala are _not_ co-variant; That is, ´S <: T´ does not imply `Array[´S´] ´<:´ Array[´T´]` in Scala. +However, it is possible to cast an array of ´S´ to an array of ´T´ if such a cast is permitted in the host environment. + +For instance `Array[String]` does not conform to `Array[Object]`, even though `String` conforms to `Object`. 
+However, it is possible to cast an expression of type `Array[String]` to `Array[Object]`, and this cast will succeed without raising a `ClassCastException`. Example: + +```scala +val xs = new Array[String](2) +// val ys: Array[Object] = xs // **** error: incompatible types +val ys: Array[Object] = xs.asInstanceOf[Array[Object]] // OK +``` + +The instantiation of an array with a polymorphic element type ´T´ requires information about type ´T´ at runtime. +This information is synthesized by adding a [context bound](07-implicits.html#context-bounds-and-view-bounds) of `scala.reflect.ClassTag` to type ´T´. +An example is the following implementation of method `mkArray`, which creates an array of an arbitrary type ´T´, given a sequence of ´T´`s which defines its elements: + +```scala +import reflect.ClassTag +def mkArray[T : ClassTag](elems: Seq[T]): Array[T] = { + val result = new Array[T](elems.length) + var i = 0 + for (elem <- elems) { + result(i) = elem + i += 1 + } + result +} +``` + +If type ´T´ is a type for which the host platform offers a specialized array representation, this representation is used. + +###### Example +On the Java Virtual Machine, an invocation of `mkArray(List(1,2,3))` will return a primitive array of `int`s, written as `int[]` in Java. + +#### Companion object + +`Array`'s companion object provides various factory methods for the instantiation of single- and multi-dimensional arrays, an extractor method [`unapplySeq`](08-pattern-matching.html#extractor-patterns) which enables pattern matching over arrays and additional utility methods: + +```scala +package scala +object Array { + /** copies array elements from `src` to `dest`. */ + def copy(src: AnyRef, srcPos: Int, + dest: AnyRef, destPos: Int, length: Int): Unit = ... + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = + + /** Create an array with given elements. */ + def apply[T: ClassTag](xs: T*): Array[T] = ... 
+ + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = ... + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = ... + ... + + /** Concatenate all argument arrays into a single array. */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = ... + + /** Returns an array that contains the results of some element computation a number + * of times. */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = ... + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = ... + ... + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = ... + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = ... + ... + + /** Returns an array containing a sequence of increasing integers in a range. */ + def range(start: Int, end: Int): Array[Int] = ... + /** Returns an array containing equally spaced values in some integer interval. */ + def range(start: Int, end: Int, step: Int): Array[Int] = ... + + /** Returns an array containing repeated applications of a function to a start value. */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = ... 
+ + /** Enables pattern matching over arrays */ + def unapplySeq[A](x: Array[A]): Option[IndexedSeq[A]] = Some(x) +} +``` + +## Class Node + +```scala +package scala.xml + +trait Node { + + /** the label of this node */ + def label: String + + /** attribute axis */ + def attribute: Map[String, String] + + /** child axis (all children of this node) */ + def child: Seq[Node] + + /** descendant axis (all descendants of this node) */ + def descendant: Seq[Node] = child.toList.flatMap { + x => x::x.descendant.asInstanceOf[List[Node]] + } + + /** descendant axis (all descendants of this node) */ + def descendant_or_self: Seq[Node] = this::child.toList.flatMap { + x => x::x.descendant.asInstanceOf[List[Node]] + } + + override def equals(x: Any): Boolean = x match { + case that:Node => + that.label == this.label && + that.attribute.sameElements(this.attribute) && + that.child.sameElements(this.child) + case _ => false + } + + /** XPath style projection function. Returns all children of this node + * that are labeled with 'that'. The document order is preserved. + */ + def \(that: Symbol): NodeSeq = { + new NodeSeq({ + that.name match { + case "_" => child.toList + case _ => + var res:List[Node] = Nil + for (x <- child.elements if x.label == that.name) { + res = x::res + } + res.reverse + } + }) + } + + /** XPath style projection function. Returns all nodes labeled with the + * name 'that' from the 'descendant_or_self' axis. Document order is preserved. + */ + def \\(that: Symbol): NodeSeq = { + new NodeSeq( + that.name match { + case "_" => this.descendant_or_self + case _ => this.descendant_or_self.asInstanceOf[List[Node]]. 
+ filter(x => x.label == that.name) + }) + } + + /** hashcode for this XML node */ + override def hashCode = + Utility.hashCode(label, attribute.toList.hashCode, child) + + /** string representation of this node */ + override def toString = Utility.toXML(this) + +} +``` + +## The `Predef` Object + +The `Predef` object defines standard methods and type aliases for Scala programs. +It is implicitly imported, as described in [the chapter on name binding](02-identifiers-names-and-scopes.html), so that all its defined members are available without qualification. +Its definition for the JVM environment conforms to the following signature: + +```scala +package scala +object Predef { + + // classOf --------------------------------------------------------- + + /** Returns the runtime representation of a class type. */ + def classOf[T]: Class[T] = null + // this is a dummy, classOf is handled by compiler. + + // valueOf ----------------------------------------------------------- + + /** Retrieve the single value of a type with a unique inhabitant. */ + @inline def valueOf[T](implicit vt: ValueOf[T]): T {} = vt.value + // instances of the ValueOf type class are provided by the compiler. 
+ + // Standard type aliases --------------------------------------------- + + type String = java.lang.String + type Class[T] = java.lang.Class[T] + + // Miscellaneous ----------------------------------------------------- + + type Function[-A, +B] = Function1[A, B] + + type Map[A, +B] = collection.immutable.Map[A, B] + type Set[A] = collection.immutable.Set[A] + + val Map = collection.immutable.Map + val Set = collection.immutable.Set + + // Manifest types, companions, and incantations for summoning --------- + + type ClassManifest[T] = scala.reflect.ClassManifest[T] + type Manifest[T] = scala.reflect.Manifest[T] + type OptManifest[T] = scala.reflect.OptManifest[T] + val ClassManifest = scala.reflect.ClassManifest + val Manifest = scala.reflect.Manifest + val NoManifest = scala.reflect.NoManifest + + def manifest[T](implicit m: Manifest[T]) = m + def classManifest[T](implicit m: ClassManifest[T]) = m + def optManifest[T](implicit m: OptManifest[T]) = m + + // Minor variations on identity functions ----------------------------- + def identity[A](x: A): A = x + def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world + @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements + + // Asserts, Preconditions, Postconditions ----------------------------- + + def assert(assertion: Boolean) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed") + } + + def assert(assertion: Boolean, message: => Any) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed: " + message) + } + + def assume(assumption: Boolean) { + if (!assumption) + throw new IllegalArgumentException("assumption failed") + } + + def assume(assumption: Boolean, message: => Any) { + if (!assumption) + throw new IllegalArgumentException(message.toString) + } + + def require(requirement: Boolean) { + if (!requirement) + throw new IllegalArgumentException("requirement failed") + } + + def 
require(requirement: Boolean, message: => Any) { + if (!requirement) + throw new IllegalArgumentException("requirement failed: "+ message) + } +``` + +```scala + // Printing and reading ----------------------------------------------- + + def print(x: Any) = Console.print(x) + def println() = Console.println() + def println(x: Any) = Console.println(x) + def printf(text: String, xs: Any*) = Console.printf(text.format(xs: _*)) + + // Implicit conversions ------------------------------------------------ + + ... +} +``` + +### Predefined Implicit Definitions + +The `Predef` object also contains a number of implicit definitions, which are available by default (because `Predef` is implicitly imported). +Implicit definitions come in two priorities. +High-priority implicits are defined in the `Predef` class itself whereas low priority implicits are defined in a class inherited by `Predef`. +The rules of static [overloading resolution](06-expressions.html#overloading-resolution) stipulate that, all other things being equal, implicit resolution prefers high-priority implicits over low-priority ones. + +The available low-priority implicits include definitions falling into the following categories. + +1. For every primitive type, a wrapper that takes values of that type to instances of a `runtime.Rich*` class. +For instance, values of type `Int` can be implicitly converted to instances of class `runtime.RichInt`. + +1. For every array type with elements of primitive type, a wrapper that takes the arrays of that type to instances of a `ArraySeq` class. +For instance, values of type `Array[Float]` can be implicitly converted to instances of class `ArraySeq[Float]`. +There are also generic array wrappers that take elements of type `Array[T]` for arbitrary `T` to `ArraySeq`s. + +1. An implicit conversion from `String` to `WrappedString`. + +The available high-priority implicits include definitions falling into the following categories. 
+ +* An implicit wrapper that adds `ensuring` methods with the following overloaded variants to type `Any`. +```scala +def ensuring(cond: Boolean): A = { assert(cond); x } +def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x } +def ensuring(cond: A => Boolean): A = { assert(cond(x)); x } +def ensuring(cond: A => Boolean, msg: Any): A = { assert(cond(x), msg); x } +``` + +* An implicit wrapper that adds a `->` method with the following implementation to type `Any`. +```scala +def -> [B](y: B): (A, B) = (x, y) +``` + +* For every array type with elements of primitive type, a wrapper that takes the arrays of that type to instances of a `runtime.ArrayOps` class. +For instance, values of type `Array[Float]` can be implicitly converted to instances of class `runtime.ArrayOps[Float]`. +There are also generic array wrappers that take elements of type `Array[T]` for arbitrary `T` to `ArrayOps`s. + +* An implicit wrapper that adds `+` and `formatted` method with the following implementations to type `Any`. +```scala +def +(other: String) = String.valueOf(self) + other +def formatted(fmtstr: String): String = fmtstr format self +``` + +* Numeric primitive conversions that implement the transitive closure of the following mappings: +``` +Byte -> Short +Short -> Int +Char -> Int +Int -> Long +Long -> Float +Float -> Double +``` + +* Boxing and unboxing conversions between primitive types and their boxed versions: +``` +Byte <-> java.lang.Byte +Short <-> java.lang.Short +Char <-> java.lang.Character +Int <-> java.lang.Integer +Long <-> java.lang.Long +Float <-> java.lang.Float +Double <-> java.lang.Double +Boolean <-> java.lang.Boolean +``` + +* An implicit definition that generates instances of type `T <:< T`, for any type `T`. Here, `<:<` is a class defined as follows. +```scala +sealed abstract class <:<[-From, +To] extends (From => To) +``` +Implicit parameters of `<:<` types are typically used to implement type constraints. 
diff --git a/docs/_spec/13-syntax-summary.md b/docs/_spec/13-syntax-summary.md new file mode 100644 index 000000000000..7c1d394bd4e1 --- /dev/null +++ b/docs/_spec/13-syntax-summary.md @@ -0,0 +1,329 @@ +--- +title: Syntax Summary +layout: default +chapter: 13 +--- + +# Syntax Summary + +The following descriptions of Scala tokens uses literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`. + +## Lexical Syntax + +The lexical syntax of Scala is given by the following grammar in EBNF form: + +```ebnf +whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ +upper ::= ‘A’ | ... | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that doesn't have + contributory property Other_Lowercase +lower ::= ‘a’ | ... | ‘z’ | ‘_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase +letter ::= upper | lower +digit ::= ‘0’ | ... | ‘9’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ +delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ +opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | + ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... 
| ‘f’ +charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) +escapeSeq ::= UnicodeEscape | charEscapeSeq +op ::= opchar {opchar} +varid ::= lower idrest +boundvarid ::= varid + | ‘`’ varid ‘`’ +plainid ::= upper idrest + | varid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] + +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +decimalNumeral ::= digit {digit} +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit} + +floatingPointLiteral + ::= digit {digit} ‘.’ digit {digit} [exponentPart] [floatType] + | ‘.’ digit {digit} [exponentPart] [floatType] + | digit {digit} exponentPart [floatType] + | digit {digit} [exponentPart] floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit {digit} +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ + +booleanLiteral ::= ‘true’ | ‘false’ + +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ + +stringLiteral ::= ‘"’ {stringElement} ‘"’ + | ‘"""’ multiLineChars ‘"""’ +stringElement ::= charNoDoubleQuoteOrNewline + | escapeSeq +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} + +interpolatedString + ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart + ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘\$\$’ + | ‘\$"’ + | ‘\$’ alphaid + | ‘\$’ BlockExpr +alphaid ::= upper idrest + | varid + +symbolLiteral ::= ‘'’ plainid + +comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ + | ‘//’ “any sequence of characters up to end of line” + +nl ::= ´\mathit{“new line character”}´ +semi ::= ‘;’ | nl {nl} +``` + +## Context-free Syntax + +The context-free syntax of Scala is given by the following EBNF grammar: + +```ebnf + Literal ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral + | interpolatedString + | symbolLiteral + | ‘null’ + + 
QualId ::= id {‘.’ id} + ids ::= id {‘,’ id} + + Path ::= StableId + | [id ‘.’] ‘this’ + StableId ::= id + | Path ‘.’ id + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id + ClassQualifier ::= ‘[’ id ‘]’ + + Type ::= FunctionArgTypes ‘=>’ Type + | InfixType [ExistentialClause] + FunctionArgTypes ::= InfixType + | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ + ExistentialClause ::= ‘forSome’ ‘{’ ExistentialDcl {semi ExistentialDcl} ‘}’ + ExistentialDcl ::= ‘type’ TypeDcl + | ‘val’ ValDcl + InfixType ::= CompoundType {id [nl] CompoundType} + CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] + | Refinement + AnnotType ::= SimpleType {Annotation} + SimpleType ::= SimpleType TypeArgs + | SimpleType ‘#’ id + | StableId + | Path ‘.’ ‘type’ + | ‘(’ Types ‘)’ + TypeArgs ::= ‘[’ Types ‘]’ + Types ::= Type {‘,’ Type} + Refinement ::= [nl] ‘{’ RefineStat {semi RefineStat} ‘}’ + RefineStat ::= Dcl + | ‘type’ TypeDef + | + TypePat ::= Type + + Ascription ::= ‘:’ InfixType + | ‘:’ Annotation {Annotation} + | ‘:’ ‘_’ ‘*’ + + Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr + | Expr1 + Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] + | ‘while’ ‘(’ Expr ‘)’ {nl} Expr + | ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr] + | ‘do’ Expr [semi] ‘while’ ‘(’ Expr ‘)’ + | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr + | ‘throw’ Expr + | ‘return’ [Expr] + | [SimpleExpr ‘.’] id ‘=’ Expr + | PrefixOperator SimpleExpr ‘=’ Expr + | SimpleExpr1 ArgumentExprs ‘=’ Expr + | PostfixExpr + | PostfixExpr Ascription + | PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’ + PostfixExpr ::= InfixExpr [id [nl]] + InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr + PrefixExpr ::= [PrefixOperator] SimpleExpr + PrefixOperator ::= ‘-’ | ‘+’ | ‘~’ | ‘!’ + SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody) + | BlockExpr + | SimpleExpr1 [‘_’] + SimpleExpr1 ::= Literal + | Path + | ‘_’ + | ‘(’ [Exprs] ‘)’ + | SimpleExpr ‘.’ id + | SimpleExpr TypeArgs + | SimpleExpr1 ArgumentExprs + | 
XmlExpr + Exprs ::= Expr {‘,’ Expr} + ArgumentExprs ::= ‘(’ [Exprs] ‘)’ + | ‘(’ [Exprs ‘,’] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’ + | [nl] BlockExpr + BlockExpr ::= ‘{’ CaseClauses ‘}’ + | ‘{’ Block ‘}’ + Block ::= BlockStat {semi BlockStat} [ResultExpr] + BlockStat ::= Import + | {Annotation} [‘implicit’] [‘lazy’] Def + | {Annotation} {LocalModifier} TmplDef + | Expr1 + | + ResultExpr ::= Expr1 + | (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block + + Enumerators ::= Generator {semi Generator} + Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} + + CaseClauses ::= CaseClause { CaseClause } + CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block + Guard ::= ‘if’ PostfixExpr + + Pattern ::= Pattern1 { ‘|’ Pattern1 } + Pattern1 ::= boundvarid ‘:’ TypePat + | ‘_’ ‘:’ TypePat + | Pattern2 + Pattern2 ::= id [‘@’ Pattern3] + | Pattern3 + Pattern3 ::= SimplePattern + | SimplePattern { id [nl] SimplePattern } + SimplePattern ::= ‘_’ + | varid + | Literal + | StableId + | StableId ‘(’ [Patterns] ‘)’ + | StableId ‘(’ [Patterns ‘,’] [id ‘@’] ‘_’ ‘*’ ‘)’ + | ‘(’ [Patterns] ‘)’ + | XmlPattern + Patterns ::= Pattern [‘,’ Patterns] + | ‘_’ ‘*’ + + TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’ + FunTypeParamClause::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ + VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam + TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + {‘<%’ Type} {‘:’ Type} + ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’] + ParamClause ::= [nl] ‘(’ [Params] ‘)’ + Params ::= Param {‘,’ Param} + Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr] + ParamType ::= Type + | ‘=>’ Type + | Type ‘*’ + ClassParamClauses ::= {ClassParamClause} + [[nl] ‘(’ ‘implicit’ ClassParams ‘)’] + ClassParamClause ::= [nl] ‘(’ [ClassParams] ‘)’ + ClassParams ::= ClassParam {‘,’ ClassParam} + ClassParam ::= {Annotation} {Modifier} [(‘val’ | ‘var’)] + id ‘:’ ParamType [‘=’ Expr] + Bindings ::= ‘(’ Binding 
{‘,’ Binding} ‘)’ + Binding ::= (id | ‘_’) [‘:’ Type] + + Modifier ::= LocalModifier + | AccessModifier + | ‘override’ + LocalModifier ::= ‘abstract’ + | ‘final’ + | ‘sealed’ + | ‘implicit’ + | ‘lazy’ + AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] + AccessQualifier ::= ‘[’ (id | ‘this’) ‘]’ + + Annotation ::= ‘@’ SimpleType {ArgumentExprs} + ConstrAnnotation ::= ‘@’ SimpleType ArgumentExprs + + TemplateBody ::= [nl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’ + TemplateStat ::= Import + | {Annotation [nl]} {Modifier} Def + | {Annotation [nl]} {Modifier} Dcl + | Expr + | + SelfType ::= id [‘:’ Type] ‘=>’ + | ‘this’ ‘:’ Type ‘=>’ + + Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} + ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors) + ImportSelectors ::= ‘{’ {ImportSelector ‘,’} (ImportSelector | ‘_’) ‘}’ + ImportSelector ::= id [‘=>’ id | ‘=>’ ‘_’] + + Dcl ::= ‘val’ ValDcl + | ‘var’ VarDcl + | ‘def’ FunDcl + | ‘type’ {nl} TypeDcl + + ValDcl ::= ids ‘:’ Type + VarDcl ::= ids ‘:’ Type + FunDcl ::= FunSig [‘:’ Type] + FunSig ::= id [FunTypeParamClause] ParamClauses + TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + + PatVarDef ::= ‘val’ PatDef + | ‘var’ VarDef + Def ::= PatVarDef + | ‘def’ FunDef + | ‘type’ {nl} TypeDef + | TmplDef + PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr + VarDef ::= PatDef + | ids ‘:’ Type ‘=’ ‘_’ + FunDef ::= FunSig [‘:’ Type] ‘=’ Expr + | FunSig [nl] ‘{’ Block ‘}’ + | ‘this’ ParamClause ParamClauses + (‘=’ ConstrExpr | [nl] ConstrBlock) + TypeDef ::= id [TypeParamClause] ‘=’ Type + + TmplDef ::= [‘case’] ‘class’ ClassDef + | [‘case’] ‘object’ ObjectDef + | ‘trait’ TraitDef + ClassDef ::= id [TypeParamClause] {ConstrAnnotation} [AccessModifier] + ClassParamClauses ClassTemplateOpt + TraitDef ::= id [TypeParamClause] TraitTemplateOpt + ObjectDef ::= id ClassTemplateOpt + ClassTemplateOpt ::= ‘extends’ ClassTemplate | [[‘extends’] TemplateBody] + TraitTemplateOpt ::= ‘extends’ TraitTemplate | 
[[‘extends’] TemplateBody] + ClassTemplate ::= [EarlyDefs] ClassParents [TemplateBody] + TraitTemplate ::= [EarlyDefs] TraitParents [TemplateBody] + ClassParents ::= Constr {‘with’ AnnotType} + TraitParents ::= AnnotType {‘with’ AnnotType} + Constr ::= AnnotType {ArgumentExprs} + EarlyDefs ::= ‘{’ [EarlyDef {semi EarlyDef}] ‘}’ ‘with’ + EarlyDef ::= {Annotation [nl]} {Modifier} PatVarDef + + ConstrExpr ::= SelfInvocation + | ConstrBlock + ConstrBlock ::= ‘{’ SelfInvocation {semi BlockStat} ‘}’ + SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} + + TopStatSeq ::= TopStat {semi TopStat} + TopStat ::= {Annotation [nl]} {Modifier} TmplDef + | Import + | Packaging + | PackageObject + | + Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’ + PackageObject ::= ‘package’ ‘object’ ObjectDef + + CompilationUnit ::= {‘package’ QualId semi} TopStatSeq +``` + + diff --git a/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md b/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md new file mode 100644 index 000000000000..a27b53db7cce --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md @@ -0,0 +1,31 @@ +--- +layout: doc-page +title: "Dropped: Class Shadowing" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/class-shadowing.html +--- + +Scala 2 so far allowed patterns like this: + +```scala +class Base { + class Ops { ... } +} + +class Sub extends Base { + class Ops { ... } +} +``` + +Scala 3 rejects this with the error message: + +```scala +6 | class Ops { } + | ^ + |class Ops cannot have the same name as class Ops in class Base + | -- class definitions cannot be overridden +``` + +The issue is that the two `Ops` classes _look_ like one overrides the +other, but classes in Scala 2 cannot be overridden. To keep things clean +(and its internal operations consistent) the Scala 3 compiler forces you +to rename the inner classes so that their names are different. 
diff --git a/docs/_spec/Dockerfile b/docs/_spec/Dockerfile new file mode 100644 index 000000000000..1fc28081c59f --- /dev/null +++ b/docs/_spec/Dockerfile @@ -0,0 +1,26 @@ +FROM ruby:2.7 + +RUN apt-get install -y curl \ + && curl -sL https://deb.nodesource.com/setup_18.x | bash - \ + && apt-get install -y nodejs \ + && curl -L https://www.npmjs.com/install.sh | sh + +RUN gem update --system +RUN gem install sass-embedded -v 1.58.0 +RUN gem install bundler:1.17.2 jekyll + +WORKDIR /srv/jekyll + +COPY Gemfile . +COPY Gemfile.lock . + + +RUN echo -n "bundle version: " && bundle --version +RUN bundle install +RUN mkdir /opt/npm-global +RUN npm config set prefix '/opt/npm-global' +RUN npm config set global true +RUN npm install bower +RUN echo -n "npm version: " && npm --version +RUN chmod u+s /bin/chown +RUN date diff --git a/docs/_spec/Gemfile b/docs/_spec/Gemfile new file mode 100644 index 000000000000..bc45dc84db8c --- /dev/null +++ b/docs/_spec/Gemfile @@ -0,0 +1,8 @@ +# To build the spec on Travis CI +source "https://rubygems.org" + +gem "jekyll", "3.6.3" +gem "webrick" +gem "rouge" +# gem 's3_website' +gem "redcarpet", "3.5.1" diff --git a/docs/_spec/Gemfile.lock b/docs/_spec/Gemfile.lock new file mode 100644 index 000000000000..48efd373725e --- /dev/null +++ b/docs/_spec/Gemfile.lock @@ -0,0 +1,57 @@ +GEM + remote: https://rubygems.org/ + specs: + addressable (2.8.1) + public_suffix (>= 2.0.2, < 6.0) + colorator (1.1.0) + ffi (1.15.5) + forwardable-extended (2.6.0) + jekyll (3.6.3) + addressable (~> 2.4) + colorator (~> 1.0) + jekyll-sass-converter (~> 1.0) + jekyll-watch (~> 1.1) + kramdown (~> 1.14) + liquid (~> 4.0) + mercenary (~> 0.3.3) + pathutil (~> 0.9) + rouge (>= 1.7, < 3) + safe_yaml (~> 1.0) + jekyll-sass-converter (1.5.2) + sass (~> 3.4) + jekyll-watch (1.5.1) + listen (~> 3.0) + kramdown (1.17.0) + liquid (4.0.3) + listen (3.7.1) + rb-fsevent (~> 0.10, >= 0.10.3) + rb-inotify (~> 0.9, >= 0.9.10) + mercenary (0.3.6) + pathutil (0.16.2) + 
forwardable-extended (~> 2.6) + public_suffix (5.0.0) + rb-fsevent (0.11.2) + rb-inotify (0.10.1) + ffi (~> 1.0) + redcarpet (3.5.1) + rouge (2.2.1) + safe_yaml (1.0.5) + sass (3.7.4) + sass-listen (~> 4.0.0) + sass-listen (4.0.0) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + webrick (1.7.0) + +PLATFORMS + ruby + x86_64-linux + +DEPENDENCIES + jekyll (= 3.6.3) + redcarpet (= 3.5.1) + rouge + webrick + +BUNDLED WITH + 2.3.5 diff --git a/docs/_spec/README.md b/docs/_spec/README.md new file mode 100644 index 000000000000..b9eba413f8a2 --- /dev/null +++ b/docs/_spec/README.md @@ -0,0 +1,67 @@ +# WIP Scala 3 Language Specification + +**This is still a work in progress, and should *not* be regarded as a source of truth.** + +First of all, the language specification is meant to be correct, precise and clear. + +Second, editing, previewing and generating output for the markdown should be simple and easy. + +Third, we'd like to support different output formats. An html page per chapter with MathJax seems like a good start, as it satisfies the second requirement, and enables the first one. + +## Editing + +We are using Jekyll and [Redcarpet](https://github.com/vmg/redcarpet) to generate the html. + +Check `Gemfile` for the current versions. + +We aim to track the configuration GitHub Pages uses but differences may arise as GitHub Pages evolves. + +## Building + + +To preview locally, run the following commands in the docs/_spec subfolder: + +``` +env UID="$(id -u)" GID="$(id -g)" docker-compose up +``` + +and open http://0.0.0.0:4000/files/archive/spec/2.13/ to view the spec. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`. + + +## General Advice for editors + +- All files must be saved as UTF-8: ensure your editors are configured appropriately. +- Use of the appropriate unicode characters instead of the latex modifiers for accents, etc. is necessary. For example, é instead of `\'e`. 
+- MathJAX errors will appear within the rendered DOM as span elements with class `mtext` and style attribute `color: red` applied. It is possible to search for this combination in the development tools of the browser of your choice. In chrome, CTRL+F / CMD+F within the inspect element panel allows you to do this. + +- This document follows the "one sentence <=> one line" convention, with the following exceptions below. + - A multiline code block is part of the sentence + - An enumeration of links is long enough + +- Whenever doing an enumeration of the kind "a, ..., z", follow the following conventions: + - It should always be "separator whitespace period period period separator whitespace", for example `, ..., ` or `,\n...,\n` for multiline. + - If in a code block, only the elements (a and z above) should be in math mode (between forward ticks) + - If in a math expression, the whole thing should be in a single math mode + - Look at the [Tuple Types section](docs/_spec/03-types.html#tuple-types) for an example of the different cases above. + +- Try to use "Note" blocks to point out logical conclusions that are not obvious, for examples, look at the [Tuple Types section](docs/_spec/03-types.html#tuple-types). + +### Macro replacements: + +- While MathJAX just supports LaTeX-style command definitions, it is recommended to not use this as it will likely cause issues with preparing the document for PDF or ebook distribution. +- `\SS` (which I could not find defined within the latex source) seems to be closest to `\mathscr{S}` +- `\TYPE` is equivalent to `\boldsymbol{type}` +- MathJAX has no support for slanted font (latex command \sl), so in all instances this should be replaced with \mathit{} +- The macro \U{ABCD} used for unicode character references can be replaced with \\uABCD. +- The macro \URange{ABCD}{DCBA} used for unicode character ranges can be replaced with \\uABCD-\\uDBCA. +- The macro \commadots can be replaced with ` , … , ` (But should not, see above). 
+There is no adequate replacement for `\textsc{...}` (small caps) in pandoc markdown. While unicode contains a number of small capital letters, it is notably missing Q and X as these glyphs are intended for phonetic spelling, therefore these cannot be reliably used. For now, the best option is to use underscore emphasis and capitalise the text manually, `_LIKE THIS_`. + +### Unicode Character replacements + +- The unicode left and right single quotation marks (‘ and ’ (U+2018 and U+2019, respectively)) have been used in place of ` and ', where the quotation marks are intended to be paired. These can be typed on a mac using Option+] for a left quote and Option+Shift+] for the right quote. +- Similarly for left and right double quotation marks (“ and ” (U+201C and U+201D, respectively)) in place of ". These can be typed on a mac using Option+[ and Option+Shift+[. diff --git a/docs/_spec/TODOreference/changed-features/changed-features.md b/docs/_spec/TODOreference/changed-features/changed-features.md new file mode 100644 index 000000000000..cacdc2598a02 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/changed-features.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Other Changed Features" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features.html +--- + +The following pages document the features that have changed in Scala 3, compared to Scala 2. diff --git a/docs/_spec/TODOreference/changed-features/compiler-plugins.md b/docs/_spec/TODOreference/changed-features/compiler-plugins.md new file mode 100644 index 000000000000..20bdb7f49836 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/compiler-plugins.md @@ -0,0 +1,128 @@ +--- +layout: doc-page +title: "Changes in Compiler Plugins" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/compiler-plugins.html +--- + +Compiler plugins are supported by Dotty (and Scala 3) since 0.9. 
There are two notable changes +compared to `scalac`: + +- No support for analyzer plugins +- Added support for research plugins + +[Analyzer plugins][1] in `scalac` run during type checking and may influence +normal type checking. This is a very powerful feature but for production usages, +a predictable and consistent type checker is more important. + +For experimentation and research, Scala 3 introduces _research plugins_. Research plugins +are more powerful than `scalac` analyzer plugins as they let plugin authors customize +the whole compiler pipeline. One can easily replace the standard typer by a custom one or +create a parser for a domain-specific language. However, research plugins are only +enabled for nightly or snapshot releases of Scala 3. + +Common plugins that add new phases to the compiler pipeline are called +_standard plugins_ in Scala 3. In terms of features, they are similar to +`scalac` plugins, despite minor changes in the API. + +## Using Compiler Plugins + +Both standard and research plugins can be used with `scalac` by adding the `-Xplugin:` option: + +```shell +scalac -Xplugin:pluginA.jar -Xplugin:pluginB.jar Test.scala +``` + +The compiler will examine the jar provided, and look for a property file named +`plugin.properties` in the root directory of the jar. The property file specifies +the fully qualified plugin class name. The format of a property file is as follows: + +```properties +pluginClass=dividezero.DivideZero +``` + +This is different from `scalac` plugins that required a `scalac-plugin.xml` file. + +Starting from 1.1.5, `sbt` also supports Scala 3 compiler plugins. Please refer to the +[`sbt` documentation][2] for more information. + +## Writing a Standard Compiler Plugin + +Here is the source code for a simple compiler plugin that reports integer divisions by +zero as errors. 
+ +```scala +package dividezero + +import dotty.tools.dotc.ast.Trees.* +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.plugins.{PluginPhase, StandardPlugin} +import dotty.tools.dotc.transform.{Pickler, Staging} + +class DivideZero extends StandardPlugin: + val name: String = "divideZero" + override val description: String = "divide zero check" + + def init(options: List[String]): List[PluginPhase] = + (new DivideZeroPhase) :: Nil + +class DivideZeroPhase extends PluginPhase: + import tpd.* + + val phaseName = "divideZero" + + override val runsAfter = Set(Pickler.name) + override val runsBefore = Set(Staging.name) + + override def transformApply(tree: Apply)(implicit ctx: Context): Tree = + tree match + case Apply(Select(rcvr, nme.DIV), List(Literal(Constant(0)))) + if rcvr.tpe <:< defn.IntType => + report.error("dividing by zero", tree.pos) + case _ => + () + tree +end DivideZeroPhase +``` + +The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` +and implement the method `init` that takes the plugin's options as argument +and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. + +Our plugin adds one compiler phase to the pipeline. A compiler phase must extend +the `PluginPhase` trait. In order to specify when the phase is executed, we also +need to specify a `runsBefore` and `runsAfter` constraints that are list of phase +names. + +We can now transform trees by overriding methods like `transformXXX`. + +## Writing a Research Compiler Plugin + +Here is a template for research plugins. 
+ +```scala +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.plugins.ResearchPlugin + +class DummyResearchPlugin extends ResearchPlugin: + val name: String = "dummy" + override val description: String = "dummy research plugin" + + def init(options: List[String], phases: List[List[Phase]])(implicit ctx: Context): List[List[Phase]] = + phases +end DummyResearchPlugin +``` + +A research plugin must extend the trait `ResearchPlugin` and implement the +method `init` that takes the plugin's options as argument as well as the compiler +pipeline in the form of a list of compiler phases. The method can replace, remove +or add any phases to the pipeline and return the updated pipeline. + + +[1]: https://github.com/scala/scala/blob/2.13.x/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +[2]: https://www.scala-sbt.org/1.x/docs/Compiler-Plugins.html diff --git a/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md b/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md new file mode 100644 index 000000000000..a62d45df9e11 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: "Automatic Eta Expansion - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/eta-expansion-spec.html +--- + +## Motivation + +Scala maintains a convenient distinction between _methods_ and _functions_. +Methods are part of the definition of a class that can be invoked in objects while functions are complete objects themselves, making them first-class entities. For example, they can be assigned to variables. +These two mechanisms are bridged in Scala by a mechanism called +[_eta-expansion_](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#eta-expansion-section) +(also called eta-abstraction), which converts a reference to a method into a function. 
Intuitively, a method `m` can be passed around by turning it into an object: the function `x => m(x)`. + +In this snippet which assigns a method to a `val`, the compiler will perform _automatic eta-expansion_, as shown in the comment: + +```scala +def m(x: Int, y: String) = ??? +val f = m // becomes: val f = (x: Int, y: String) => m(x, y) +``` + +In Scala 2, a method reference `m` is converted to a function value only if the expected type is a function type, which means the conversion in the example above would not have been triggered, because `val f` does not have a type ascription. To still get eta-expansion, a shortcut `m _` would force the conversion. + +For methods with one or more parameters like in the example above, this restriction has now been dropped. The syntax `m _` is no longer needed and will be deprecated in the future. + +## Automatic eta-expansion and partial application +In the following example `m` can be partially applied to the first two parameters. +Assigning `m` to `f1` will automatically eta-expand. + +```scala +def m(x: Boolean, y: String)(z: Int): List[Int] +val f1 = m +val f2 = m(true, "abc") +``` + +This creates two function values: + +```scala +f1: (Boolean, String) => Int => List[Int] +f2: Int => List[Int] +``` + +## Automatic eta-expansion and implicit parameter lists + +Methods with implicit parameter lists will always get applied to implicit arguments. + +```scala +def foo(x: Int)(implicit p: Double): Float = ??? +implicit val bla: Double = 1.0 + +val bar = foo // val bar: Int => Float = ... +``` + +## Automatic Eta-Expansion and query types + +A method with context parameters can be expanded to a value of a context type by writing the expected context type explicitly. + +```scala +def foo(x: Int)(using p: Double): Float = ??? +val bar: Double ?=> Float = foo(3) +``` + +## Rules + +- If `m` has an argument list with one or more parameters, we always eta-expand +- If `m` is has an empty argument list (i.e. has type `()R`): + 1. 
If the expected type is of the form `() => T`, we eta expand. + 2. If m is defined by Java, or overrides a Java defined method, we insert `()`. + 3. Otherwise we issue an error of the form: + +Thus, an unapplied method with an empty argument list is only converted to a function when a function type is expected. It is considered best practice to either explicitly apply the method to `()`, or convert it to a function with `() => m()`. + +The method value syntax `m _` is deprecated. + +## Reference + +For more information, see [PR #2701](https://github.com/lampepfl/dotty/pull/2701). diff --git a/docs/_spec/TODOreference/changed-features/eta-expansion.md b/docs/_spec/TODOreference/changed-features/eta-expansion.md new file mode 100644 index 000000000000..c05378135e54 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/eta-expansion.md @@ -0,0 +1,42 @@ +--- +layout: doc-page +title: "Automatic Eta Expansion" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/eta-expansion.html +--- + +The conversion of _methods_ into _functions_ has been improved and happens automatically for methods with one or more parameters. + +```scala +def m(x: Boolean, y: String)(z: Int): List[Int] +val f1 = m +val f2 = m(true, "abc") +``` + +This creates two function values: +```scala +f1: (Boolean, String) => Int => List[Int] +f2: Int => List[Int] +``` + +The syntax `m _` is no longer needed and will be deprecated in the future. + +## Automatic eta-expansion and nullary methods + +Automatic eta expansion does not apply to "nullary" methods that take an empty parameter list. + +```scala +def next(): T +``` + +Given a simple reference to `next` does not auto-convert to a function. +One has to write explicitly `() => next()` to achieve that. +Once again since the `_` is going to be deprecated it's better to write it this way +rather than `next _`. 
+ +The reason for excluding nullary methods from automatic eta expansion +is that Scala implicitly inserts the `()` argument, which would +conflict with eta expansion. Automatic `()` insertion is +[limited](../dropped-features/auto-apply.md) in Scala 3, but the fundamental ambiguity +remains. + +[More details](eta-expansion-spec.md) diff --git a/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md b/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md new file mode 100644 index 000000000000..dc19e10c8b8f --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md @@ -0,0 +1,117 @@ +--- +layout: doc-page +title: "Implicit Conversions - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-conversions-spec.html +--- + +## Implementation + +An implicit conversion, or _view_, from type `S` to type `T` is +defined by either: + +- An `implicit def` which has type `S => T` or `(=> S) => T` +- An implicit value which has type `Conversion[S, T]` + +The standard library defines an abstract class [`Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html): + +```scala +package scala +@java.lang.FunctionalInterface +abstract class Conversion[-T, +U] extends Function1[T, U]: + def apply(x: T): U +``` + +Function literals are automatically converted to `Conversion` values. + +Views are applied in three situations: + +1. If an expression `e` is of type `T`, and `T` does not conform to + the expression's expected type `pt`. In this case, an implicit `v` + which is applicable to `e` and whose result type conforms to `pt` + is searched. The search proceeds as in the case of implicit + parameters, where the implicit scope is the one of `T => pt`. If + such a view is found, the expression `e` is converted to `v(e)`. +1. In a selection `e.m` with `e` of type `T`, if the selector `m` does + not denote an accessible member of `T`. 
In this case, a view `v` + which is applicable to `e` and whose result contains an accessible + member named `m` is searched. The search proceeds as in the case of + implicit parameters, where the implicit scope is the one of `T`. If + such a view is found, the selection `e.m` is converted to `v(e).m`. +1. In an application `e.m(args)` with `e` of type `T`, if the selector + `m` denotes some accessible member(s) of `T`, but none of these + members is applicable to the arguments `args`. In this case, a view + `v` which is applicable to `e` and whose result contains a method + `m` which is applicable to `args` is searched. The search proceeds + as in the case of implicit parameters, where the implicit scope is + the one of `T`. If such a view is found, the application + `e.m(args)` is converted to `v(e).m(args)`. + +## Differences with Scala 2 implicit conversions + +In Scala 2, views whose parameters are passed by-value take precedence +over views whose parameters are passed by-name. This is no longer the +case in Scala 3. A type error reporting the ambiguous conversions will +be emitted in cases where this rule would be applied in Scala 2: + +```scala +implicit def conv1(x: Int): String = x.toString +implicit def conv2(x: => Int): String = x.toString + +val x: String = 0 // Compiles in Scala 2 (uses `conv1`), + // type error in Scala 3 because of ambiguity. +``` + +In Scala 2, implicit values of a function type would be considered as +potential views. 
In Scala 3, these implicit values need to have type +`Conversion`: + +```scala +// Scala 2: +def foo(x: Int)(implicit conv: Int => String): String = x + +// Becomes with Scala 3: +def foo(x: Int)(implicit conv: Conversion[Int, String]): String = x + +// Call site is unchanged: +foo(4)(_.toString) + +// Scala 2: +implicit val myConverter: Int => String = _.toString + +// Becomes with Scala 3: +implicit val myConverter: Conversion[Int, String] = _.toString +``` + +Note that implicit conversions are also affected by the [changes to implicit resolution](implicit-resolution.md) between Scala 2 and Scala 3. + +## Motivation for the changes + +The introduction of [`scala.Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html) +in Scala 3 and the decision to restrict implicit values of this type to be +considered as potential views comes from the desire to remove surprising +behavior from the language: + +```scala +implicit val m: Map[Int, String] = Map(1 -> "abc") + +val x: String = 1 // Scala 2: assigns "abc" to x + // Scala 3: type error +``` + +This snippet contains a type error. The right-hand side of `val x` +does not conform to type `String`. In Scala 2, the compiler will use +`m` as an implicit conversion from `Int` to `String`, whereas Scala 3 +will report a type error, because `Map` isn't an instance of +[`Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html). + +## Migration path + +Implicit values that are used as views should see their type changed to `Conversion`. + +For the migration of implicit conversions that are affected by the +changes to implicit resolution, refer to the [Changes in Implicit Resolution](implicit-resolution.md) for more information. + +## Reference + +For more information about implicit resolution, see [Changes in Implicit Resolution](implicit-resolution.md). +Other details are available in [PR #2065](https://github.com/lampepfl/dotty/pull/2065). 
diff --git a/docs/_spec/TODOreference/changed-features/implicit-conversions.md b/docs/_spec/TODOreference/changed-features/implicit-conversions.md new file mode 100644 index 000000000000..eef236f39a07 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/implicit-conversions.md @@ -0,0 +1,65 @@ +--- +layout: doc-page +title: "Implicit Conversions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-conversions.html +--- + +An _implicit conversion_, also called _view_, is a conversion that +is applied by the compiler in several situations: + +1. When an expression `e` of type `T` is encountered, but the compiler + needs an expression of type `S`. +1. When an expression `e.m` where `e` has type `T` but `T` defines no + member `m` is encountered. + +In those cases, the compiler looks in the implicit scope for a +conversion that can convert an expression of type `T` to an expression +of type `S` (or to a type that defines a member `m` in the second +case). + +This conversion can be either: + +1. An `implicit def` of type `T => S` or `(=> T) => S` +1. An implicit value of type `scala.Conversion[T, S]` + +Defining an implicit conversion will emit a warning unless the import +`scala.language.implicitConversions` is in scope, or the flag +`-language:implicitConversions` is given to the compiler. + +## Examples + +The first example is taken from [`scala.Predef`](https://scala-lang.org/api/3.x/scala/Predef$.html). 
+Thanks to this implicit conversion, it is possible to pass a +[`scala.Int`](https://scala-lang.org/api/3.x/scala/Int.html) +to a Java method that expects a `java.lang.Integer` + +```scala +import scala.language.implicitConversions +implicit def int2Integer(x: Int): java.lang.Integer = + x.asInstanceOf[java.lang.Integer] +``` + +The second example shows how to use `Conversion` to define an +`Ordering` for an arbitrary type, given existing `Ordering`s for other +types: + +```scala +import scala.language.implicitConversions +implicit def ordT[T, S]( + implicit conv: Conversion[T, S], + ordS: Ordering[S] + ): Ordering[T] = + // `ordS` compares values of type `S`, but we can convert from `T` to `S` + (x: T, y: T) => ordS.compare(x, y) + +class A(val x: Int) // The type for which we want an `Ordering` + +// Convert `A` to a type for which an `Ordering` is available: +implicit val AToInt: Conversion[A, Int] = _.x + +implicitly[Ordering[Int]] // Ok, exists in the standard library +implicitly[Ordering[A]] // Ok, will use the implicit conversion from + // `A` to `Int` and the `Ordering` for `Int`. +``` + +[More details](implicit-conversions-spec.md) diff --git a/docs/_spec/TODOreference/changed-features/implicit-resolution.md b/docs/_spec/TODOreference/changed-features/implicit-resolution.md new file mode 100644 index 000000000000..bf15baa3299c --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/implicit-resolution.md @@ -0,0 +1,169 @@ +--- +layout: doc-page +title: "Changes in Implicit Resolution" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-resolution.html +--- + +This section describes changes to the implicit resolution that apply both to the new `given`s and to the old-style `implicit`s in Scala 3. +Implicit resolution uses a new algorithm which caches implicit results +more aggressively for performance. There are also some changes that +affect implicits on the language level. 
+ +**1.** Types of implicit values and result types of implicit methods +must be explicitly declared. Excepted are only values in local blocks +where the type may still be inferred: +```scala + class C { + + val ctx: Context = ... // ok + + /*!*/ implicit val x = ... // error: type must be given explicitly + + /*!*/ implicit def y = ... // error: type must be given explicitly + } + val y = { + implicit val ctx = this.ctx // ok + ... + } +``` +**2.** Nesting is now taken into account for selecting an implicit. Consider for instance the following scenario: +```scala + def f(implicit i: C) = { + def g(implicit j: C) = { + implicitly[C] + } + } +``` +This will now resolve the `implicitly` call to `j`, because `j` is nested +more deeply than `i`. Previously, this would have resulted in an +ambiguity error. The previous possibility of an implicit search failure +due to _shadowing_ (where an implicit is hidden by a nested definition) +no longer applies. + +**3.** Package prefixes no longer contribute to the implicit search scope of a type. Example: +```scala + package p + + given a: A = A() + + object o: + given b: B = B() + type C +``` +Both `a` and `b` are visible as implicits at the point of the definition +of `type C`. However, a reference to `p.o.C` outside of package `p` will +have only `b` in its implicit search scope but not `a`. + +In more detail, here are the rules for what constitutes the implicit scope of +a type: + +**Definition:** A reference is an _anchor_ if it refers to an object, a class, a trait, an abstract type, an opaque type alias, or a match type alias. References to packages and package objects are anchors only under `-source:3.0-migration`. +Opaque type aliases count as anchors only outside the scope where their alias is visible. + +**Definition:** The _anchors_ of a type _T_ is a set of references defined as follows: + + 1. If _T_ is a reference to an anchor, _T_ itself plus, if _T_ is of the form _P#A_, the anchors of _P_. + 1. 
If _T_ is an alias of _U_, the anchors of _U_. + 1. If _T_ is a reference to a type parameter, the union of the anchors of both of its bounds. + 1. If _T_ is a singleton reference, the anchors of its underlying type, plus, + if _T_ is of the form _(P#x).type_, the anchors of _P_. + 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object. + 1. If _T_ is some other type, the union of the anchors of each constituent type of _T_. + + **Definition:** The _implicit scope_ of a type _T_ is the smallest set _S_ of term references such that + + 1. If _T_ is a reference to a class, _S_ includes a reference to the companion object + of the class, if it exists, as well as the implicit scopes of all of _T_'s parent classes. + 1. If _T_ is a reference to an object, _S_ includes _T_ itself as well as + the implicit scopes of all of _T_'s parent classes. + 1. If _T_ is a reference to an opaque type alias named _A_, _S_ includes + a reference to an object _A_ defined in the same scope as the type, if it exists, + as well as the implicit scope of _T_'s underlying type or bounds. + 1. If _T_ is a reference to an abstract type or match type alias + named _A_, _S_ includes a reference to an object _A_ defined in the same scope as the type, if it exists, as well as the implicit scopes of _T_'s given bounds. + 1. If _T_ is a reference to an anchor of the form _p.A_ then _S_ also includes + all term references on the path _p_. + 1. If _T_ is some other type, _S_ includes the implicit scopes of all anchors of _T_. + + +**4.** The treatment of ambiguity errors has changed. If an ambiguity is encountered in some recursive step of an implicit search, the ambiguity is propagated to the caller. + +Example: Say you have the following definitions: +```scala + class A + class B extends C + class C + implicit def a1: A + implicit def a2: A + implicit def b(implicit a: A): B + implicit def c: C +``` +and the query `implicitly[C]`. 
+ +This query would now be classified as ambiguous. This makes sense, after all +there are two possible solutions, `b(a1)` and `b(a2)`, neither of which is better +than the other and both of which are better than the third solution, `c`. +By contrast, Scala 2 would have rejected the search for `A` as +ambiguous, and subsequently have classified the query `b(implicitly[A])` as a normal fail, +which means that the alternative `c` would be chosen as solution! + +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement +the analogue of a "negated" search in implicit resolution, where a query `Q1` fails if some +other query `Q2` succeeds and `Q1` succeeds if `Q2` fails. With the new cleaned up behavior +these techniques no longer work. But there is now a new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) +which implements negation directly. For any query type `Q`, `NotGiven[Q]` succeeds if and only if +the implicit search for `Q` fails. + +**5.** The treatment of divergence errors has also changed. A divergent implicit is treated as a normal failure, after which alternatives are still tried. This also makes sense: Encountering a divergent implicit means that we assume that no finite solution can be found on the corresponding path, but another path can still be tried. By contrast, +most (but not all) divergence errors in Scala 2 would terminate the implicit search as a whole. + +**6.** Scala 2 gives a lower level of priority to implicit conversions with call-by-name parameters relative to implicit conversions with call-by-value parameters. Scala 3 drops this distinction. So the following code snippet would be ambiguous in Scala 3: + +```scala + implicit def conv1(x: Int): A = new A(x) + implicit def conv2(x: => Int): A = new A(x) + def buzz(y: A) = ??? 
+ buzz(1) // error: ambiguous +``` +**7.** The rule for picking a _most specific_ alternative among a set of overloaded or implicit alternatives is refined to take context parameters into account. All else being equal, an alternative that takes some context parameters is taken to be less specific than an alternative that takes none. If both alternatives take context parameters, we try to choose between them as if they were methods with regular parameters. The following paragraph in the [SLS §6.26.3](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) is affected by this change: + +_Original version:_ + +> An alternative A is _more specific_ than an alternative B if the relative weight of A over B is greater than the relative weight of B over A. + +_Modified version:_ + +An alternative A is _more specific_ than an alternative B if + + - the relative weight of A over B is greater than the relative weight of B over A, or + - the relative weights are the same, and A takes no implicit parameters but B does, or + - the relative weights are the same, both A and B take implicit parameters, and A is more specific than B if all implicit parameters in either alternative are replaced by regular parameters. + +**8.** The previous disambiguation of implicits based on inheritance depth is refined to make it transitive. Transitivity is important to guarantee that search outcomes are compilation-order independent. Here's a scenario where the previous rules violated transitivity: +```scala + class A extends B + object A { given a ... } + class B + object B extends C { given b ... } + class C { given c } +``` + Here `a` is more specific than `b` since the companion class `A` is a subclass of the companion class `B`. Also, `b` is more specific than `c` + since `object B` extends class `C`. But `a` is not more specific than `c`. This means if `a, b, c` are all applicable implicits, it makes + a difference in what order they are compared. 
If we compare `b` and `c` + first, we keep `b` and drop `c`. Then, comparing `a` with `b` we keep `a`. But if we compare `a` with `c` first, we fail with an ambiguity error. + +The new rules are as follows: An implicit `a` defined in `A` is more specific than an implicit `b` defined in `B` if + + - `A` extends `B`, or + - `A` is an object and the companion class of `A` extends `B`, or + - `A` and `B` are objects, + `B` does not inherit any implicit members from base classes (*), + and the companion class of `A` extends the companion class of `B`. + +Condition (*) is new. It is necessary to ensure that the defined relation is transitive. + + + + + +[//]: # todo: expand with precise rules diff --git a/docs/_spec/TODOreference/changed-features/imports.md b/docs/_spec/TODOreference/changed-features/imports.md new file mode 100644 index 000000000000..2058ef08b7db --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/imports.md @@ -0,0 +1,60 @@ +--- +layout: doc-page +title: "Imports" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/imports.html +--- + +The syntax of wildcard and renaming imports (and exports) has changed. + +## Wildcard Imports + +Wildcard imports are now expressed with `*` instead of underscore. Example: +```scala +import scala.annotation.* // imports everything in the annotation package +``` + +If you want to import a member named `*` specifically, you can use backticks around it. + +```scala +object A: + def * = ... + def min = ... + +object B: + import A.`*` // imports just `*` + +object C: + import A.* // imports everything in A +``` + +## Renaming Imports + +To rename or exclude an import, we now use `as` instead of `=>`. A single renaming import no longer needs to be enclosed in braces. 
Examples: + +```scala +import A.{min as minimum, `*` as multiply} +import Predef.{augmentString as _, *} // imports everything except augmentString +import scala.annotation as ann +import java as j +``` + +## Migration + +To support cross-building, Scala 3.0 supports the old import syntax with `_` for wildcards and `=>` for renamings in addition to the new one. The old syntax +will be dropped in a future version. Automatic rewritings from old to new syntax +are offered under settings `-source 3.1-migration -rewrite`. + +## Syntax + +``` +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec + | SimpleRef `as` id +ImportSpec ::= NamedSelector + | WildCardSelector + | ‘{’ ImportSelectors ‘}’ +NamedSelector ::= id [‘as’ (id | ‘_’)] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] +ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +``` diff --git a/docs/_spec/TODOreference/changed-features/interpolation-escapes.md b/docs/_spec/TODOreference/changed-features/interpolation-escapes.md new file mode 100644 index 000000000000..594e7671c5ab --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/interpolation-escapes.md @@ -0,0 +1,14 @@ +--- +layout: doc-page +title: "Escapes in interpolations" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/interpolation-escapes.html +--- + +In Scala 2 there is no straightforward way to represent a double quote character `"` in a single-quoted interpolation. A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. + +In Scala 3, we can use the `$` meta character of interpolations to escape a `"` character. 
Example: + +```scala + val inventor = "Thomas Edison" + val interpolation = s"as $inventor said: $"The three great essentials to achieve anything worth while are: Hard work, Stick-to-itiveness, and Common sense.$"" +``` diff --git a/docs/_spec/TODOreference/changed-features/lazy-vals-init.md b/docs/_spec/TODOreference/changed-features/lazy-vals-init.md new file mode 100644 index 000000000000..131ac6ad7bb2 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/lazy-vals-init.md @@ -0,0 +1,80 @@ +--- +layout: doc-page +title: Lazy Vals Initialization +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/lazy-vals-init.html +--- + +Scala 3 implements [Version 6](https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html#version-6---no-synchronization-on-this-and-concurrent-initialization-of-fields) +of the [SIP-20] improved lazy vals initialization proposal. + +## Motivation + +The newly proposed lazy val initialization mechanism aims to eliminate the acquisition of resources +during the execution of the lazy val initializer block, thus reducing the possibility of a deadlock. +The concrete deadlock scenarios that the new lazy val initialization scheme eliminates are +summarized in the [SIP-20] document. 
+ +## Implementation + +Given a lazy field of the form: + +```scala +class Foo { + lazy val bar = +} +``` + +The Scala 3 compiler will generate code equivalent to: + +```scala +class Foo { + import scala.runtime.LazyVals + var value_0: Int = _ + var bitmap: Long = 0L + val bitmap_offset: Long = LazyVals.getOffset(classOf[LazyCell], "bitmap") + + def bar(): Int = { + while (true) { + val flag = LazyVals.get(this, bitmap_offset) + val state = LazyVals.STATE(flag, ) + + if (state == ) { + return value_0 + } else if (state == ) { + if (LazyVals.CAS(this, bitmap_offset, flag, , )) { + try { + val result = + value_0 = result + LazyVals.setFlag(this, bitmap_offset, , ) + return result + } + catch { + case ex => + LazyVals.setFlag(this, bitmap_offset, , ) + throw ex + } + } + } else /* if (state == || state == ) */ { + LazyVals.wait4Notification(this, bitmap_offset, flag, ) + } + } + } +} +``` + +The state of the lazy val `` is represented with 4 values: 0, 1, 2 and 3. The state 0 +represents a non-initialized lazy val. The state 1 represents a lazy val that is currently being +initialized by some thread. The state 2 denotes that there are concurrent readers of the lazy val. +The state 3 represents a lazy val that has been initialized. `` is the id of the lazy +val. This id grows with the number of volatile lazy vals defined in the class. + +## Note on recursive lazy vals + +Ideally recursive lazy vals should be flagged as an error. The current behavior for +recursive lazy vals is undefined (initialization may result in a deadlock). 
+ +## Reference + +* [SIP-20] + +[SIP-20]: https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html diff --git a/docs/_spec/TODOreference/changed-features/main-functions.md b/docs/_spec/TODOreference/changed-features/main-functions.md new file mode 100644 index 000000000000..4460300d003e --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/main-functions.md @@ -0,0 +1,87 @@ +--- +layout: doc-page +title: "Main Methods" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/main-functions.html +--- + +Scala 3 offers a new way to define programs that can be invoked from the command line: +A [`@main`](https://scala-lang.org/api/3.x/scala/main.html) annotation on a method turns this method into an executable program. +Example: + +```scala +@main def happyBirthday(age: Int, name: String, others: String*) = + val suffix = + age % 100 match + case 11 | 12 | 13 => "th" + case _ => + age % 10 match + case 1 => "st" + case 2 => "nd" + case 3 => "rd" + case _ => "th" + val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name") + for other <- others do bldr.append(" and ").append(other) + bldr.toString +``` + +This would generate a main program `happyBirthday` that could be called like this + +``` +> scala happyBirthday 23 Lisa Peter +Happy 23rd birthday, Lisa and Peter +``` + +A [`@main`](https://scala-lang.org/api/3.x/scala/main.html) annotated method can be written either at the top-level or in a statically accessible object. The name of the program is in each case the name of the method, without any object prefixes. The [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method can have an arbitrary number of parameters. +For each parameter type there must be an instance of the [`scala.util.CommandLineParser.FromString[T]`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$$FromString.html) type class that is used to convert an argument string to the required parameter type `T`. 
+The parameter list of a main method can end in a repeated parameter that then takes all remaining arguments given on the command line. + +The program implemented from a [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method checks that there are enough arguments on +the command line to fill in all parameters, and that argument strings are convertible to +the required types. If a check fails, the program is terminated with an error message. + +Examples: + +``` +> scala happyBirthday 22 +Illegal command line after first argument: more arguments expected + +> scala happyBirthday sixty Fred +Illegal command line: java.lang.NumberFormatException: For input string: "sixty" +``` + +The Scala compiler generates a program from a [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method `f` as follows: + + - It creates a class named `f` in the package where the [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method was found + - The class has a static method `main` with the usual signature. It takes an `Array[String]` + as argument and returns [`Unit`](https://scala-lang.org/api/3.x/scala/Unit.html). + - The generated `main` method calls method `f` with arguments converted using + methods in the [`scala.util.CommandLineParser`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$.html) object. + +For instance, the `happyBirthDay` method above would generate additional code equivalent to the following class: + +```scala +final class happyBirthday: + import scala.util.CommandLineParser as CLP + def main(args: Array[String]): Unit = + try + happyBirthday( + CLP.parseArgument[Int](args, 0), + CLP.parseArgument[String](args, 1), + CLP.parseRemainingArguments[String](args, 2)) + catch + case error: CLP.ParseError => CLP.showError(error) +``` + +**Note**: The `` modifier above expresses that the `main` method is generated +as a static method of class `happyBirthDay`. It is not available for user programs in Scala. 
Regular "static" members are generated in Scala using objects instead. + +[`@main`](https://scala-lang.org/api/3.x/scala/main.html) methods are the recommended scheme to generate programs that can be invoked from the command line in Scala 3. They replace the previous scheme to write program as objects with a special `App` parent class. In Scala 2, `happyBirthday` could be written also like this: + +```scala +object happyBirthday extends App: + // needs by-hand parsing of arguments vector + ... +``` + +The previous functionality of [`App`](https://www.scala-lang.org/api/3.x/scala/App.html), which relied on the "magic" [`DelayedInit`](../dropped-features/delayed-init.md) trait, is no longer available. [`App`](https://scala-lang.org/api/3.x/scala/App.html) still exists in limited form for now, but it does not support command line arguments and will be deprecated in the future. If programs need to cross-build +between Scala 2 and Scala 3, it is recommended to use an explicit `main` method with an `Array[String]` argument instead. diff --git a/docs/_spec/TODOreference/changed-features/match-syntax.md b/docs/_spec/TODOreference/changed-features/match-syntax.md new file mode 100644 index 000000000000..dba50e9beb6a --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/match-syntax.md @@ -0,0 +1,56 @@ +--- +layout: doc-page +title: "Match Expressions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/match-syntax.html +--- + +The syntactical precedence of match expressions has been changed. +`match` is still a keyword, but it is used like an alphabetical operator. This has several consequences: + + 1. `match` expressions can be chained: + + ```scala + xs match { + case Nil => "empty" + case _ => "nonempty" + } match { + case "empty" => 0 + case "nonempty" => 1 + } + ``` + + (or, dropping the optional braces) + + ```scala + xs match + case Nil => "empty" + case _ => "nonempty" + match + case "empty" => 0 + case "nonempty" => 1 + ``` + + 2. 
`match` may follow a period: + + ```scala + if xs.match + case Nil => false + case _ => true + then "nonempty" + else "empty" + ``` + + 3. The scrutinee of a match expression must be an `InfixExpr`. Previously the scrutinee could be followed by a type ascription `: T`, but this is no longer supported. So `x : T match { ... }` now has to be + written `(x: T) match { ... }`. + +## Syntax + +The new syntax of match expressions is as follows. + +``` +InfixExpr ::= ... + | InfixExpr MatchClause +SimpleExpr ::= ... + | SimpleExpr ‘.’ MatchClause +MatchClause ::= ‘match’ ‘{’ CaseClauses ‘}’ +``` diff --git a/docs/_spec/TODOreference/changed-features/numeric-literals.md b/docs/_spec/TODOreference/changed-features/numeric-literals.md new file mode 100644 index 000000000000..bba837dbf67d --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/numeric-literals.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Numeric Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/numeric-literals.html +--- + +[Document was moved](../experimental/numeric-literals.md) diff --git a/docs/_spec/TODOreference/changed-features/operators.md b/docs/_spec/TODOreference/changed-features/operators.md new file mode 100644 index 000000000000..0cf25d77bc11 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/operators.md @@ -0,0 +1,173 @@ +--- +layout: doc-page +title: "Rules for Operators" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/operators.html +--- + +The rules for infix operators have changed in some parts: + +First, an alphanumeric method can be used as an infix operator only if its definition carries an `infix` modifier. + +Second, it is recommended (but not enforced) to augment definitions of symbolic operators +with [`@targetName` annotations](../other-new-features/targetName.md). + +Finally, a syntax change allows infix operators to be written on the left in a multi-line expression. 
+ +## The `infix` Modifier + +An `infix` modifier on a method definition allows using the method as an infix operation. Example: + +```scala +import scala.annotation.targetName + +trait MultiSet[T]: + + infix def union(other: MultiSet[T]): MultiSet[T] + + def difference(other: MultiSet[T]): MultiSet[T] + + @targetName("intersection") + def *(other: MultiSet[T]): MultiSet[T] + +end MultiSet + +val s1, s2: MultiSet[Int] + +s1 union s2 // OK +s1 `union` s2 // also OK but unusual +s1.union(s2) // also OK + +s1.difference(s2) // OK +s1 `difference` s2 // OK +s1 difference s2 // gives a deprecation warning + +s1 * s2 // OK +s1 `*` s2 // also OK, but unusual +s1.*(s2) // also OK, but unusual +``` + +Infix operations involving alphanumeric operators are deprecated, unless +one of the following conditions holds: + + - the operator definition carries an `infix` modifier, or + - the operator was compiled with Scala 2, or + - the operator is followed by an opening brace. + +An alphanumeric operator is an operator consisting entirely of letters, digits, the `$` and `_` characters, or +any Unicode character `c` for which `java.lang.Character.isIdentifierPart(c)` returns `true`. + +Infix operations involving symbolic operators are always allowed, so `infix` is redundant for methods with symbolic names. + +The `infix` modifier can also be given to a type: + +```scala +infix type or[X, Y] +val x: String or Int = ... +``` + +### Motivation + +The purpose of the `infix` modifier is to achieve consistency across a code base in how a method or type is applied. The idea is that the author of a method decides whether that method should be applied as an infix operator or in a regular application. Use sites then implement that decision consistently. + +### Details + + 1. `infix` is a soft modifier. It is treated as a normal identifier except when in modifier position. + + 2. If a method overrides another, their infix annotations must agree. 
Either both are annotated with `infix`, or none of them are. + + 3. `infix` modifiers can be given to method definitions. The first non-receiver parameter list of an `infix` method must define exactly one parameter. Examples: + + ```scala + infix def op1(x: S): R // ok + infix def op2[T](x: T)(y: S): R // ok + infix def op3[T](x: T, y: S): R // error: two parameters + + extension (x: A) + infix def op4(y: B): R // ok + infix def op5(y1: B, y2: B): R // error: two parameters + ``` + + 4. `infix` modifiers can also be given to type, trait or class definitions that have exactly two type parameters. An infix type like + + ```scala + infix type op[X, Y] + ``` + + can be applied using infix syntax, i.e. `A op B`. + + 5. To smooth migration to Scala 3.0, alphanumeric operators will only be deprecated from Scala 3.1 onwards, +or if the `-source future` option is given in Dotty/Scala 3. + +## The `@targetName` Annotation + +It is recommended that definitions of symbolic operators carry a [`@targetName` annotation](../other-new-features/targetName.md) that provides an encoding of the operator with an alphanumeric name. This has several benefits: + + - It helps interoperability between Scala and other languages. One can call + a Scala-defined symbolic operator from another language using its target name, + which avoids having to remember the low-level encoding of the symbolic name. + - It helps legibility of stacktraces and other runtime diagnostics, where the + user-defined alphanumeric name will be shown instead of the low-level encoding. + - It serves as a documentation tool by providing an alternative regular name + as an alias of a symbolic operator. This makes the definition also easier + to find in a search. + +## Syntax Change + +Infix operators can now appear at the start of lines in a multi-line expression. Examples: + +```scala +val str = "hello" + ++ " world" + ++ "!" 
+ +def condition = + x > 0 + || + xs.exists(_ > 0) + || xs.isEmpty +``` + +Previously, those expressions would have been rejected, since the compiler's semicolon inference +would have treated the continuations `++ " world"` or `|| xs.isEmpty` as separate statements. + +To make this syntax work, the rules are modified to not infer semicolons in front of leading infix operators. +A _leading infix operator_ is + - a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks that + - starts a new line, and + - is not following a blank line, and + - is followed by at least one whitespace character and a token that can start an expression. + - Furthermore, if the operator appears on its own line, the next line must have at least + the same indentation width as the operator. + +Example: + +```scala + freezing + | boiling +``` + +This is recognized as a single infix operation. Compare with: + +```scala + freezing + !boiling +``` + +This is seen as two statements, `freezing` and `!boiling`. The difference is that only the operator in the first example +is followed by a space. + +Another example: + +```scala + println("hello") + ??? + ??? match { case 0 => 1 } +``` + +This code is recognized as three different statements. `???` is syntactically a symbolic identifier, but +neither of its occurrences is followed by a space and a token that can start an expression. + +## Unary operators + +A unary operator must not have explicit parameter lists even if they are empty. +A unary operator is a method named "unary_`op`" where `op` is one of `+`, `-`, `!`, or `~`. 
diff --git a/docs/_spec/TODOreference/changed-features/overload-resolution.md b/docs/_spec/TODOreference/changed-features/overload-resolution.md new file mode 100644 index 000000000000..621515c2a7f8 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/overload-resolution.md @@ -0,0 +1,102 @@ +--- +layout: doc-page +title: "Changes in Overload Resolution" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/overload-resolution.html +--- + +Overload resolution in Scala 3 improves on Scala 2 in three ways. +First, it takes all argument lists into account instead of +just the first argument list. +Second, it can infer parameter types of function values even if they +are in the first argument list. +Third, default arguments are no longer relevant for prioritization. + +## Looking Beyond the First Argument List + +Overloading resolution now can take argument lists into account when +choosing among a set of overloaded alternatives. +For example, the following code compiles in Scala 3, while it results in an +ambiguous overload error in Scala 2: + +```scala +def f(x: Int)(y: String): Int = 0 +def f(x: Int)(y: Int): Int = 0 + +f(3)("") // ok +``` + +The following code compiles as well: + +```scala +def g(x: Int)(y: Int)(z: Int): Int = 0 +def g(x: Int)(y: Int)(z: String): Int = 0 + +g(2)(3)(4) // ok +g(2)(3)("") // ok +``` + +To make this work, the rules for overloading resolution in [SLS §6.26.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) are augmented +as follows: + +> In a situation where a function is applied to more than one argument list, if overloading +resolution yields several competing alternatives when `n >= 1` parameter lists are taken +into account, then resolution re-tried using `n + 1` argument lists. 
+ +This change is motivated by the new language feature +[extension methods](../contextual/extension-methods.md), where emerges the need to do +overload resolution based on additional argument blocks. + +## Parameter Types of Function Values + +The handling of function values with missing parameter types has been improved. We can now +pass such values in the first argument list of an overloaded application, provided +that the remaining parameters suffice for picking a variant of the overloaded function. +For example, the following code compiles in Scala 3, while it results in a +missing parameter type error in Scala2: + +```scala +def f(x: Int, f2: Int => Int) = f2(x) +def f(x: String, f2: String => String) = f2(x) +f("a", _.toUpperCase) +f(2, _ * 2) +``` + +To make this work, the rules for overloading resolution in [SLS §6.26.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) are modified +as follows: + +Replace the sentence + +> Otherwise, let `S1,...,Sm` be the vector of types obtained by typing each argument with an undefined expected type. + +with the following paragraph: + +> Otherwise, let `S1,...,Sm` be the vector of known types of all argument types, where the _known type_ of an argument `E` +is determined as followed: + + - If `E` is a function value `(p_1, ..., p_n) => B` that misses some parameter types, the known type + of `E` is `(S_1, ..., S_n) => ?`, where each `S_i` is the type of parameter `p_i` if it is given, or `?` + otherwise. Here `?` stands for a _wildcard type_ that is compatible with every other type. + - Otherwise the known type of `E` is the result of typing `E` with an undefined expected type. + +A pattern matching closure + +```scala +{ case P1 => B1 ... case P_n => B_n } +```` + +is treated as if it was expanded to the function value + +```scala +x => x match { case P1 => B1 ... case P_n => B_n } +``` + +and is therefore also approximated with a `? => ?` type. 
## Default Arguments Are No Longer Relevant for Prioritization
For instance, if at some point one knows that a list `elems` is non-empty one might want to decompose it like this: +```scala +val first :: rest = elems // error +``` +This works in Scala 2. In fact it is a typical use case for Scala 2's rules. But in Scala 3.2 it will give a warning. One can avoid the warning by marking the right-hand side with an [`@unchecked`](https://scala-lang.org/api/3.x/scala/unchecked.html) annotation: +```scala +val first :: rest = elems: @unchecked // OK +``` +This will make the compiler accept the pattern binding. It might give an error at runtime instead, if the underlying assumption that `elems` can never be empty is wrong. + +## Pattern Bindings in `for` Expressions + +Analogous changes apply to patterns in `for` expressions. For instance: + +```scala +val elems: List[Any] = List((1, 2), "hello", (3, 4)) +for (x, y) <- elems yield (y, x) // error: pattern's type (Any, Any) is more specialized + // than the right-hand side expression's type Any +``` +This code gives a compile-time warning in Scala 3.2 whereas in Scala 2 the list `elems` +is filtered to retain only the elements of tuple type that match the pattern `(x, y)`. +The filtering functionality can be obtained in Scala 3 by prefixing the pattern with `case`: +```scala +for case (x, y) <- elems yield (y, x) // returns List((2, 1), (4, 3)) +``` + +## Syntax Changes + +Generators in for expressions may be prefixed with `case`. +``` +Generator ::= [‘case’] Pattern1 ‘<-’ Expr +``` + +## Migration + +The new syntax is supported in Scala 3.0. However, to enable smooth cross compilation between Scala 2 and Scala 3, the changed behavior and additional type checks are only enabled under the `-source future` setting. They will be enabled by default in version 3.2 of the language. 
diff --git a/docs/_spec/TODOreference/changed-features/pattern-matching.md b/docs/_spec/TODOreference/changed-features/pattern-matching.md new file mode 100644 index 000000000000..30ae5d9dc104 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/pattern-matching.md @@ -0,0 +1,243 @@ +--- +layout: doc-page +title: "Option-less pattern matching" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html +--- + +The implementation of pattern matching in Scala 3 was greatly simplified compared to Scala 2. From a user perspective, this means that Scala 3 generated patterns are a _lot_ easier to debug, as variables all show up in debug modes and positions are correctly preserved. + +Scala 3 supports a superset of Scala 2 [extractors](https://www.scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#extractor-patterns). + +## Extractors + +Extractors are objects that expose a method `unapply` or `unapplySeq`: + +```scala +def unapply[A](x: T)(implicit x: B): U +def unapplySeq[A](x: T)(implicit x: B): U +``` + +Extractors that expose the method `unapply` are called fixed-arity extractors, which +work with patterns of fixed arity. Extractors that expose the method `unapplySeq` are +called variadic extractors, which enables variadic patterns. + +### Fixed-Arity Extractors + +Fixed-arity extractors expose the following signature: + +```scala +def unapply[A](x: T)(implicit x: B): U +``` + +The type `U` conforms to one of the following matches: + +- Boolean match +- Product match + +Or `U` conforms to the type `R`: + +```scala +type R = { + def isEmpty: Boolean + def get: S +} +``` + +and `S` conforms to one of the following matches: + +- single match +- name-based match + +The former form of `unapply` has higher precedence, and _single match_ has higher +precedence over _name-based match_. 
A usage of a fixed-arity extractor is irrefutable if one of the following conditions holds:
+ +object FirstChars: + def unapply(s: String): FirstChars = new FirstChars(s) + +"Hi!" match + case FirstChars(char1, char2) => + println(s"First: $char1; Second: $char2") + +// First: H; Second: i +``` + +## Single Match + +- If there is exactly `1` pattern, pattern-matching on `1` pattern with type `U` + + + +```scala +class Nat(val x: Int): + def get: Int = x + def isEmpty = x < 0 + +object Nat: + def unapply(x: Int): Nat = new Nat(x) + +5 match + case Nat(n) => println(s"$n is a natural number") + case _ => () + +// 5 is a natural number +``` + +## Name-based Match + +- `N > 1` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1 ... _N: PN` members in `U` +- Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN` + +```scala +object ProdEmpty: + def _1: Int = ??? + def _2: String = ??? + def isEmpty = true + def unapply(s: String): this.type = this + def get = this + +"" match + case ProdEmpty(_, _) => ??? + case _ => () +``` + +## Sequence Match + +- `U <: X`, `T2` and `T3` conform to `T1` + +```scala +type X = { + def lengthCompare(len: Int): Int // or, `def length: Int` + def apply(i: Int): T1 + def drop(n: Int): scala.Seq[T2] + def toSeq: scala.Seq[T3] +} +``` + +- Pattern-matching on _exactly_ `N` simple patterns with types `T1, T1, ..., T1`, where `N` is the runtime size of the sequence, or +- Pattern-matching on `>= N` simple patterns and _a vararg pattern_ (e.g., `xs: _*`) with types `T1, T1, ..., T1, Seq[T1]`, where `N` is the minimum size of the sequence. + + + +```scala +object CharList: + def unapplySeq(s: String): Option[Seq[Char]] = Some(s.toList) + +"example" match + case CharList(c1, c2, c3, c4, _, _, _) => + println(s"$c1,$c2,$c3,$c4") + case _ => + println("Expected *exactly* 7 characters!") + +// e,x,a,m +``` + +## Product-Sequence Match + +- `U <: Product` +- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... 
`_N: PN` members in `U` +- `PN` conforms to the signature `X` defined in Seq Pattern +- Pattern-matching on exactly `>= N` patterns, the first `N - 1` patterns have types `P1, P2, ... P(N-1)`, + the type of the remaining patterns are determined as in Seq Pattern. + +```scala +class Foo(val name: String, val children: Int*) +object Foo: + def unapplySeq(f: Foo): Option[(String, Seq[Int])] = + Some((f.name, f.children)) + +def foo(f: Foo) = f match + case Foo(name, x, y, ns*) => ">= two children." + case Foo(name, ns*) => => "< two children." +``` + +There are plans for further simplification, in particular to factor out _product match_ +and _name-based match_ into a single type of extractor. + +## Type testing + +Abstract type testing with `ClassTag` is replaced with `TypeTest` or the alias `Typeable`. + +- pattern `_: X` for an abstract type requires a `TypeTest` in scope +- pattern `x @ X()` for an unapply that takes an abstract type requires a `TypeTest` in scope + +[More details on `TypeTest`](../other-new-features/type-test.md) diff --git a/docs/_spec/TODOreference/changed-features/structural-types-spec.md b/docs/_spec/TODOreference/changed-features/structural-types-spec.md new file mode 100644 index 000000000000..d456932649fb --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/structural-types-spec.md @@ -0,0 +1,153 @@ +--- +layout: doc-page +title: "Programmatic Structural Types - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structural-types-spec.html +--- + +## Syntax + +``` +SimpleType ::= ... 
| Refinement +Refinement ::= ‘{’ RefineStatSeq ‘}’ +RefineStatSeq ::= RefineStat {semi RefineStat} +RefineStat ::= ‘val’ VarDcl | ‘def’ DefDcl | ‘type’ {nl} TypeDcl +``` + +## Implementation of Structural Types + +The standard library defines a universal marker trait +[`scala.Selectable`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/Selectable.scala): + +```scala +trait Selectable extends Any +``` + +An implementation of `Selectable` that relies on [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html) is +available in the standard library: `scala.reflect.Selectable`. Other +implementations can be envisioned for platforms where Java reflection +is not available. + +Implementations of `Selectable` have to make available one or both of +the methods `selectDynamic` and `applyDynamic`. The methods could be members of the `Selectable` implementation or they could be extension methods. + +The `selectDynamic` method takes a field name and returns the value associated with that name in the `Selectable`. +It should have a signature of the form: + +```scala +def selectDynamic(name: String): T +``` + +Often, the return type `T` is `Any`. + +Unlike `scala.Dynamic`, there is no special meaning for an `updateDynamic` method. +However, we reserve the right to give it meaning in the future. +Consequently, it is recommended not to define any member called `updateDynamic` in `Selectable`s. + +The `applyDynamic` method is used for selections that are applied to arguments. It takes a method name and possibly `Class`es representing its parameters types as well as the arguments to pass to the function. +Its signature should be of one of the two following forms: + +```scala +def applyDynamic(name: String)(args: Any*): T +def applyDynamic(name: String, ctags: Class[?]*)(args: Any*): T +``` + +Both versions are passed the actual arguments in the `args` parameter. 
The second version takes in addition a vararg argument of `java.lang.Class`es that identify the method's parameter classes. Such an argument is needed +if `applyDynamic` is implemented using Java reflection, but it could be +useful in other cases as well. `selectDynamic` and `applyDynamic` can also take additional context parameters in using clauses. These are resolved in the normal way at the callsite. + +Given a value `v` of type `C { Rs }`, where `C` is a class reference +and `Rs` are structural refinement declarations, and given `v.a` of type `U`, we consider three distinct cases: + +- If `U` is a value type, we map `v.a` to: + ```scala + v.selectDynamic("a").asInstanceOf[U] + ``` + +- If `U` is a method type `(T11, ..., T1n)...(TN1, ..., TNn): R` and it is not a dependent method type, we map `v.a(a11, ..., a1n)...(aN1, ..., aNn)` to: + ```scala + v.applyDynamic("a")(a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + If this call resolves to an `applyDynamic` method of the second form that takes a `Class[?]*` argument, we further rewrite this call to + ```scala + v.applyDynamic("a", c11, ..., c1n, ..., cN1, ... cNn)( + a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + where each `c_ij` is the literal `java.lang.Class[?]` of the type of the formal parameter `Tij`, i.e., `classOf[Tij]`. + +- If `U` is neither a value nor a method type, or a dependent method + type, an error is emitted. + +Note that `v`'s static type does not necessarily have to conform to `Selectable`, nor does it need to have `selectDynamic` and `applyDynamic` as members. It suffices that there is an implicit +conversion that can turn `v` into a `Selectable`, and the selection methods could also be available as +[extension methods](../contextual/extension-methods.md). + +## Limitations of Structural Types + +- Dependent methods cannot be called via structural call. 
+ +- Refinements may not introduce overloads: If a refinement specifies the signature + of a method `m`, and `m` is also defined in the parent type of the refinement, then + the new signature must properly override the existing one. + +- Subtyping of structural refinements must preserve erased parameter types: Assume + we want to prove `S <: T { def m(x: A): B }`. Then, as usual, `S` must have a member method `m` that can take an argument of type `A`. Furthermore, if `m` is not a member of `T` (i.e. the refinement is structural), an additional condition applies. In this case, the member _definition_ `m` of `S` will have a parameter + with type `A'` say. The additional condition is that the erasure of `A'` and `A` is the same. Here is an example: + + ```scala + class Sink[A] { def put(x: A): Unit = {} } + val a = Sink[String]() + val b: { def put(x: String): Unit } = a // error + b.put("abc") // looks for a method with a `String` parameter + ``` + The second to last line is not well-typed, + since the erasure of the parameter type of `put` in class `Sink` is `Object`, + but the erasure of `put`'s parameter in the type of `b` is `String`. + This additional condition is necessary, since we will have to resort + to some (as yet unknown) form of reflection to call a structural member + like `put` in the type of `b` above. The condition ensures that the statically + known parameter types of the refinement correspond up to erasure to the + parameter types of the selected call target at runtime. + + Most reflection dispatch algorithms need to know exact erased parameter types. For instance, if the example above would typecheck, the call + `b.put("abc")` on the last line would look for a method `put` in the runtime type of `b` that takes a `String` parameter. But the `put` method is the one from class `Sink`, which takes an `Object` parameter. Hence the call would fail at runtime with a `NoSuchMethodException`. 
+ + One might hope for a "more intelligent" reflexive dispatch algorithm that does not require exact parameter type matching. Unfortunately, this can always run into ambiguities, as long as overloading is a possibility. For instance, continuing the example above, we might introduce a new subclass `Sink1` of `Sink` and change the definition of `a` as follows: + + ```scala + class Sink1[A] extends Sink[A] { def put(x: "123") = ??? } + val a: Sink[String] = Sink1[String]() + ``` + + Now there are two `put` methods in the runtime type of `b` with erased parameter + types `Object` and `String`, respectively. Yet dynamic dispatch still needs to go + to the first `put` method, even though the second looks like a better match. + + For the cases where we can in fact implement reflection without knowing precise parameter types (for instance if static overloading is replaced by dynamically dispatched multi-methods), there is an escape hatch. For types that extend `scala.Selectable.WithoutPreciseParameterTypes` the signature check is omitted. Example: + + ```scala + trait MultiMethodSelectable extends Selectable.WithoutPreciseParameterTypes: + // Assume this version of `applyDynamic` can be implemented without knowing + // precise parameter types `paramTypes`: + def applyDynamic(name: String, paramTypes: Class[_]*)(args: Any*): Any = ??? + + class Sink[A] extends MultiMethodSelectable: + def put(x: A): Unit = {} + + val a = new Sink[String] + val b: MultiMethodSelectable { def put(x: String): Unit } = a // OK + ``` +## Differences with Scala 2 Structural Types + +- Scala 2 supports structural types by means of Java reflection. Unlike + Scala 3, structural calls do not rely on a mechanism such as + `Selectable`, and reflection cannot be avoided. +- In Scala 2, refinements can introduce overloads. +- In Scala 2, mutable `var`s are allowed in refinements. In Scala 3, + they are no longer allowed. 
+- Scala 2 does not impose the "same-erasure" restriction on subtyping of structural types. It allows some calls to fail at runtime instead. + +## Context + +For more information, see [Rethink Structural Types](https://github.com/lampepfl/dotty/issues/1886). diff --git a/docs/_spec/TODOreference/changed-features/structural-types.md b/docs/_spec/TODOreference/changed-features/structural-types.md new file mode 100644 index 000000000000..37e583332cf1 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/structural-types.md @@ -0,0 +1,191 @@ +--- +layout: doc-page +title: "Programmatic Structural Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structural-types.html +--- + +## Motivation + +Some use cases, such as modelling database access, are more awkward in +statically typed languages than in dynamically typed languages: With +dynamically typed languages, it's quite natural to model a row as a +record or object, and to select entries with simple dot notation (e.g. +`row.columnName`). + +Achieving the same experience in a statically typed +language requires defining a class for every possible row arising from +database manipulation (including rows arising from joins and +projections) and setting up a scheme to map between a row and the +class representing it. + +This requires a large amount of boilerplate, which leads developers to +trade the advantages of static typing for simpler schemes where column +names are represented as strings and passed to other operators (e.g. +`row.select("columnName")`). This approach forgoes the advantages of +static typing, and is still not as natural as the dynamically typed +version. + +Structural types help in situations where we would like to support +simple dot notation in dynamic contexts without losing the advantages +of static typing. They allow developers to use dot notation and +configure how fields and methods should be resolved.
+ +## Example + +Here's an example of a structural type `Person`: + +```scala + class Record(elems: (String, Any)*) extends Selectable: + private val fields = elems.toMap + def selectDynamic(name: String): Any = fields(name) + + type Person = Record { val name: String; val age: Int } + ``` + +The type `Person` adds a _refinement_ to its parent type `Record` that defines the two fields `name` and `age`. We say the refinement is _structural_ since `name` and `age` are not defined in the parent type. But they exist nevertheless as members of class `Person`. For instance, the following +program would print "Emma is 42 years old.": + +```scala + val person = Record("name" -> "Emma", "age" -> 42).asInstanceOf[Person] + println(s"${person.name} is ${person.age} years old.") +``` + +The parent type `Record` in this example is a generic class that can represent arbitrary records in its `elems` argument. This argument is a +sequence of pairs of labels of type `String` and values of type `Any`. +When we create a `Person` as a `Record` we have to assert with a typecast +that the record defines the right fields of the right types. `Record` +itself is too weakly typed so the compiler cannot know this without +help from the user. In practice, the connection between a structural type +and its underlying generic representation would most likely be done by +a database layer, and therefore would not be a concern of the end user. + +`Record` extends the marker trait [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) and defines +a method `selectDynamic`, which maps a field name to its value. +Selecting a structural type member is done by calling this method. +The `person.name` and `person.age` selections are translated by +the Scala compiler to: + +```scala + person.selectDynamic("name").asInstanceOf[String] + person.selectDynamic("age").asInstanceOf[Int] +``` + +Besides `selectDynamic`, a `Selectable` class sometimes also defines a method `applyDynamic`. 
This can then be used to translate function calls of structural members. So, if `a` is an instance of `Selectable`, a structural call like `a.f(b, c)` would translate to + +```scala + a.applyDynamic("f")(b, c) +``` + +## Using Java Reflection + +Structural types can also be accessed using [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html). Example: + +```scala + type Closeable = { def close(): Unit } + + class FileInputStream: + def close(): Unit + + class Channel: + def close(): Unit +``` + +Here, we define a structural type `Closeable` that defines a `close` method. There are various classes that have `close` methods, we just list [`FileInputStream`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/FileInputStream.html#%3Cinit%3E(java.io.File)) and [`Channel`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/channels/Channel.html) as two examples. It would be easiest if the two classes shared a common interface that factors out the `close` method. But such factorings are often not possible if different libraries are combined in one application. Yet, we can still have methods that work on +all classes with a `close` method by using the `Closeable` type. For instance, + +```scala + import scala.reflect.Selectable.reflectiveSelectable + + def autoClose(f: Closeable)(op: Closeable => Unit): Unit = + try op(f) finally f.close() +``` + +The call `f.close()` has to use Java reflection to identify and call the `close` method in the receiver `f`. This needs to be enabled by an import +of `reflectiveSelectable` shown above. What happens "under the hood" is then the following: + + - The import makes available an implicit conversion that turns any type into a + `Selectable`. `f` is wrapped in this conversion. + + - The compiler then transforms the `close` call on the wrapped `f` + to an `applyDynamic` call. 
The end result is: + + ```scala + reflectiveSelectable(f).applyDynamic("close")() + ``` + - The implementation of `applyDynamic` in `reflectiveSelectable`'s result +uses Java reflection to find and call a method `close` with zero parameters in the value referenced by `f` at runtime. + +Structural calls like this tend to be much slower than normal method calls. The mandatory import of `reflectiveSelectable` serves as a signpost that something inefficient is going on. + +**Note:** In Scala 2, Java reflection is the only mechanism available for structural types and it is automatically enabled without needing the +`reflectiveSelectable` conversion. However, to warn against inefficient +dispatch, Scala 2 requires a language import `import scala.language.reflectiveCalls`. + +Before resorting to structural calls with Java reflection one should consider alternatives. For instance, sometimes a more modular _and_ efficient architecture can be obtained using type classes. + +## Extensibility + +New instances of `Selectable` can be defined to support means of +access other than Java reflection, which would enable usages such as +the database access example given at the beginning of this document. + +## Local Selectable Instances + +Local and anonymous classes that extend `Selectable` get more refined types +than other classes. Here is an example: + +```scala +trait Vehicle extends reflect.Selectable: + val wheels: Int + +val i3 = new Vehicle: // i3: Vehicle { val range: Int } + val wheels = 4 + val range = 240 + +i3.range +``` + +The type of `i3` in this example is `Vehicle { val range: Int }`. Hence, +`i3.range` is well-formed. Since the base class `Vehicle` does not define a `range` field or method, we need structural dispatch to access the `range` field of the anonymous class that initializes `i3`.
Structural dispatch +is implemented by the base trait [`reflect.Selectable`](https://scala-lang.org/api/3.x/scala/reflect/Selectable.html) of `Vehicle`, which defines the necessary `selectDynamic` member. + +`Vehicle` could also extend some other subclass of [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) that implements `selectDynamic` and `applyDynamic` differently. But if it does not extend a `Selectable` at all, the code would no longer typecheck: + +```scala +trait Vehicle: + val wheels: Int + +val i3 = new Vehicle: // i3: Vehicle + val wheels = 4 + val range = 240 + +i3.range // error: range is not a member of `Vehicle` +``` + +The difference is that the type of an anonymous class that does not extend `Selectable` is just formed from the parent type(s) of the class, without +adding any refinements. Hence, `i3` now has just type `Vehicle` and the selection `i3.range` gives a "member not found" error. + +Note that in Scala 2 all local and anonymous classes could produce values with refined types. But +members defined by such refinements could be selected only with the language import +[`reflectiveCalls`](https://scala-lang.org/api/3.x/scala/languageFeature$$reflectiveCalls$.html). + +## Relation with `scala.Dynamic` + +There are clearly some connections with [`scala.Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) here, since +both select members programmatically. But there are also some +differences. + +- Fully dynamic selection is not typesafe, but structural selection + is, as long as the correspondence of the structural type with the + underlying value is as stated. + +- [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) is just a marker trait, which gives more leeway where and + how to define reflective access operations. By contrast + `Selectable` is a trait which declares the access operations. + +- Two access operations, `selectDynamic` and `applyDynamic` are shared + between both approaches. 
In `Selectable`, `applyDynamic` may also take + [`java.lang.Class`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Class.html) arguments indicating the method's formal parameter types. + [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) comes with `updateDynamic`. + +[More details](structural-types-spec.md) diff --git a/docs/_spec/TODOreference/changed-features/type-checking.md b/docs/_spec/TODOreference/changed-features/type-checking.md new file mode 100644 index 000000000000..6f59b1a1c1c6 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/type-checking.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Changes in Type Checking" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/type-checking.html +--- + +*** **TO BE FILLED IN** *** diff --git a/docs/_spec/TODOreference/changed-features/type-inference.md b/docs/_spec/TODOreference/changed-features/type-inference.md new file mode 100644 index 000000000000..00d0e959f5ed --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/type-inference.md @@ -0,0 +1,10 @@ +--- +layout: doc-page +title: "Changes in Type Inference" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/type-inference.html +--- + +For more information, see the two presentations + +* [Scala 3, Type inference and You!](https://www.youtube.com/watch?v=lMvOykNQ4zs) by Guillaume Martres (September 2019) +* [GADTs in Dotty](https://www.youtube.com/watch?v=VV9lPg3fNl8) by Aleksander Boruch-Gruszecki (July 2019).
diff --git a/docs/_spec/TODOreference/changed-features/vararg-splices.md b/docs/_spec/TODOreference/changed-features/vararg-splices.md new file mode 100644 index 000000000000..43c4acc5f880 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/vararg-splices.md @@ -0,0 +1,40 @@ +--- +layout: doc-page +title: "Vararg Splices" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/vararg-splices.html +--- + +The syntax of vararg splices in patterns and function arguments has changed. The new syntax uses a postfix `*`, analogously to how a vararg parameter is declared. + +```scala +val arr = Array(0, 1, 2, 3) +val lst = List(arr*) // vararg splice argument +lst match + case List(0, 1, xs*) => println(xs) // binds xs to Seq(2, 3) + case List(1, _*) => // wildcard pattern +``` + +The old syntax for splice arguments will be phased out. + +```scala +/*!*/ val lst = List(arr: _*) // syntax error + lst match + case List(0, 1, xs @ _*) // ok, equivalent to `xs*` +``` + +## Syntax + +``` +ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ + | ‘(’ [Patterns ‘,’] Pattern2 ‘*’ ‘)’ + +ParArgumentExprs ::= ‘(’ [‘using’] ExprsInParens ‘)’ + | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ +``` + +## Compatibility considerations + +To enable cross compilation between Scala 2 and Scala 3, the compiler will +accept both the old and the new syntax. Under the `-source future` setting, an error +will be emitted when the old syntax is encountered. An automatic rewrite from old +to new syntax is offered under `-source future-migration`. 
diff --git a/docs/_spec/TODOreference/changed-features/wildcards.md b/docs/_spec/TODOreference/changed-features/wildcards.md new file mode 100644 index 000000000000..0d3e13c3d7e0 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/wildcards.md @@ -0,0 +1,50 @@ +--- +layout: doc-page +title: Wildcard Arguments in Types +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/wildcards.html +--- + +The syntax of wildcard arguments in types has changed from `_` to `?`. Example: +```scala +List[?] +Map[? <: AnyRef, ? >: Null] +``` + +## Motivation + +We would like to use the underscore syntax `_` to stand for an anonymous type parameter, aligning it with its meaning in +value parameter lists. So, just as `f(_)` is a shorthand for the lambda `x => f(x)`, in the future `C[_]` will be a shorthand +for the type lambda `[X] =>> C[X]`. This makes higher-kinded types easier to use. It also removes the wart that, used as a type +parameter, `F[_]` means `F` is a type constructor whereas used as a type, `F[_]` means it is a wildcard (i.e. existential) type. +In the future, `F[_]` will mean the same thing, no matter where it is used. + +We pick `?` as a replacement syntax for wildcard types, since it aligns with +[Java's syntax](https://docs.oracle.com/javase/tutorial/java/generics/wildcardGuidelines.html). + +## Migration Strategy + +The migration to the new scheme is complicated, in particular since the [kind projector](https://github.com/typelevel/kind-projector) +compiler plugin still uses the reverse convention, with `?` meaning parameter placeholder instead of wildcard. Fortunately, kind projector has added `*` as an alternative syntax for `?`. + +A step-by-step migration is made possible with the following measures: + + 1. In Scala 3.0, both `_` and `?` are legal names for wildcards. + 2. In Scala 3.1, `_` is deprecated in favor of `?` as a name for a wildcard. A `-rewrite` option is + available to rewrite one to the other. + 3. 
In Scala 3.2, the meaning of `_` changes from wildcard to placeholder for type parameter. + 4. The Scala 3.1 behavior is already available today under the `-source future` setting. + +To smooth the transition for codebases that use kind-projector, we adopt the following measures under the command line +option `-Ykind-projector`: + + 1. In Scala 3.0, `*` is available as a type parameter placeholder. + 2. In Scala 3.2, `*` is deprecated in favor of `_`. A `-rewrite` option is + available to rewrite one to the other. + 3. In Scala 3.3, `*` is removed again, and all type parameter placeholders will be expressed with `_`. + +These rules make it possible to cross build between Scala 2 using the kind projector plugin and Scala 3.0 - 3.2 using the compiler option `-Ykind-projector`. + +There is also a migration path for users that want a one-time transition to syntax with `_` as a type parameter placeholder. +With option `-Ykind-projector:underscores` Scala 3 will regard `_` as a type parameter placeholder, leaving `?` as the only syntax for wildcards. + +To cross-compile with old Scala 2 sources, while using `_` a placeholder, you must use options `-Xsource:3 -P:kind-projector:underscore-placeholders` together with a recent version of kind-projector (`0.13` and higher) and most recent versions of Scala 2 (`2.13.5` and higher and `2.12.14` and higher) diff --git a/docs/_spec/TODOreference/contextual/by-name-context-parameters.md b/docs/_spec/TODOreference/contextual/by-name-context-parameters.md new file mode 100644 index 000000000000..3004bfb2c4c2 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/by-name-context-parameters.md @@ -0,0 +1,65 @@ +--- +layout: doc-page +title: "By-Name Context Parameters" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/by-name-context-parameters.html +--- + +Context parameters can be declared by-name to avoid a divergent inferred expansion. 
Example: + +```scala +trait Codec[T]: + def write(x: T): Unit + +given intCodec: Codec[Int] = ??? + +given optionCodec[T](using ev: => Codec[T]): Codec[Option[T]] with + def write(xo: Option[T]) = xo match + case Some(x) => ev.write(x) + case None => + +val s = summon[Codec[Option[Int]]] + +s.write(Some(33)) +s.write(None) +``` +As is the case for a normal by-name parameter, the argument for the context parameter `ev` +is evaluated on demand. In the example above, if the option value `x` is `None`, it is +not evaluated at all. + +The synthesized argument for a context parameter is backed by a local val +if this is necessary to prevent an otherwise diverging expansion. + +The precise steps for synthesizing an argument for a by-name context parameter of type `=> T` are as follows. + + 1. Create a new given of type `T`: + + ```scala + given lv: T = ??? + ``` + + where `lv` is an arbitrary fresh name. + + 1. This given is not immediately available as candidate for argument inference (making it immediately available could result in a loop in the synthesized computation). But it becomes available in all nested contexts that look again for an argument to a by-name context parameter. + + 1. If this search succeeds with expression `E`, and `E` contains references to `lv`, replace `E` by + + ```scala + { given lv: T = E; lv } + ``` + + Otherwise, return `E` unchanged. + +In the example above, the definition of `s` would be expanded as follows. + +```scala +val s = summon[Test.Codec[Option[Int]]]( + optionCodec[Int](using intCodec) +) +``` + +No local given instance was generated because the synthesized argument is not recursive. + +## Reference + +For more information, see [Issue #1998](https://github.com/lampepfl/dotty/issues/1998) +and the associated [Scala SIP](https://docs.scala-lang.org/sips/byname-implicits.html). 
diff --git a/docs/_spec/TODOreference/contextual/context-bounds.md b/docs/_spec/TODOreference/contextual/context-bounds.md new file mode 100644 index 000000000000..42479d6802b3 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/context-bounds.md @@ -0,0 +1,53 @@ +--- +layout: doc-page +title: "Context Bounds" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-bounds.html +--- + +A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter. Using a context bound, the `maximum` function of the last section can be written like this: + +```scala +def maximum[T: Ord](xs: List[T]): T = xs.reduceLeft(max) +``` + +A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `using Ord[T]`. The context parameter(s) generated from context bounds +are added as follows: + + - If the method parameters end in an implicit parameter list or using clause, + context parameters are added in front of that list. + - Otherwise they are added as a separate parameter clause at the end. + +Example: + +```scala +def f[T: C1 : C2, U: C3](x: T)(using y: U, z: V): R +``` + +would expand to + +```scala +def f[T, U](x: T)(using _: C1[T], _: C2[T], _: C3[U], y: U, z: V): R +``` + +Context bounds can be combined with subtype bounds. If both are present, subtype bounds come first, e.g. + +```scala +def g[T <: B : C](x: T): R = ... +``` + +## Migration + +To ease migration, context bounds in Dotty map in Scala 3.0 to old-style implicit parameters +for which arguments can be passed either with a `(using ...)` clause or with a normal application. From Scala 3.1 on, they will map to context parameters instead, as is described above. + +If the source version is `future-migration`, any pairing of an evidence +context parameter stemming from a context bound with a normal argument will give a migration +warning. The warning indicates that a `(using ...)` clause is needed instead. 
The rewrite can be +done automatically under `-rewrite`. + +## Syntax + +``` +TypeParamBounds ::= [SubtypeBounds] {ContextBound} +ContextBound ::= ‘:’ Type +``` diff --git a/docs/_spec/TODOreference/contextual/context-functions-spec.md b/docs/_spec/TODOreference/contextual/context-functions-spec.md new file mode 100644 index 000000000000..109513e9da86 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/context-functions-spec.md @@ -0,0 +1,79 @@ +--- +layout: doc-page +title: "Context Functions - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-functions-spec.html +--- + +## Syntax + +``` +Type ::= ... + | FunArgTypes ‘?=>’ Type +Expr ::= ... + | FunParams ‘?=>’ Expr +``` + +Context function types associate to the right, e.g. +`S ?=> T ?=> U` is the same as `S ?=> (T ?=> U)`. + +## Implementation + +Context function types are shorthands for class types that define `apply` +methods with context parameters. Specifically, the `N`-ary function type + +`T1, ..., TN ?=> R` is a shorthand for the class type +`ContextFunctionN[T1, ..., TN, R]`. Such class types are assumed to have the following definitions, for any value of `N >= 1`: + +```scala +package scala +trait ContextFunctionN[-T1, ..., -TN, +R]: + def apply(using x1: T1, ..., xN: TN): R +``` + +Context function types erase to normal function types, so these classes are +generated on the fly for typechecking, but not realized in actual code. + +Context function literals `(x1: T1, ..., xn: Tn) ?=> e` map +context parameters `xi` of types `Ti` to the result of evaluating the expression `e`. +The scope of each context parameter `xi` is `e`. The parameters must have pairwise distinct names. + +If the expected type of the context function literal is of the form +`scala.ContextFunctionN[S1, ..., Sn, R]`, the expected type of `e` is `R` and +the type `Ti` of any of the parameters `xi` can be omitted, in which case `Ti += Si` is assumed. 
If the expected type of the context function literal is +some other type, all context parameter types must be explicitly given, and the expected type of `e` is undefined. +The type of the context function literal is `scala.ContextFunctionN[S1, ...,Sn, T]`, where `T` is the widened +type of `e`. `T` must be equivalent to a type which does not refer to any of +the context parameters `xi`. + +The context function literal is evaluated as the instance creation expression + +```scala +new scala.ContextFunctionN[T1, ..., Tn, T]: + def apply(using x1: T1, ..., xn: Tn): T = e +``` + +A context parameter may also be a wildcard represented by an underscore `_`. In that case, a fresh name for the parameter is chosen arbitrarily. + +**Note:** The closing paragraph of the +[Anonymous Functions section](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#anonymous-functions) +of Scala 2.13 is subsumed by context function types and should be removed. + +Context function literals `(x1: T1, ..., xn: Tn) ?=> e` are +automatically created for any expression `e` whose expected type is +`scala.ContextFunctionN[T1, ..., Tn, R]`, unless `e` is +itself a context function literal. This is analogous to the automatic +insertion of [`scala.Function0`](https://scala-lang.org/api/3.x/scala/Function0.html) around expressions in by-name argument position. + +Context function types generalize to `N > 22` in the same way that function types do, see [the corresponding +documentation](../dropped-features/limit22.md). + +## Examples + +See the section on Expressiveness from [Simplicitly: foundations and +applications of implicit function +types](https://dl.acm.org/citation.cfm?id=3158130). + +## Type Checking + +After desugaring no additional typing rules are required for context function types. 
diff --git a/docs/_spec/TODOreference/contextual/context-functions.md b/docs/_spec/TODOreference/contextual/context-functions.md new file mode 100644 index 000000000000..0ad3c8757782 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/context-functions.md @@ -0,0 +1,154 @@ +--- +layout: doc-page +title: "Context Functions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-functions.html +--- + +_Context functions_ are functions with (only) context parameters. +Their types are _context function types_. Here is an example of a context function type: + +```scala +type Executable[T] = ExecutionContext ?=> T +``` +Context functions are written using `?=>` as the "arrow" sign. +They are applied to synthesized arguments, in +the same way methods with context parameters are applied. For instance: +```scala + given ec: ExecutionContext = ... + + def f(x: Int): ExecutionContext ?=> Int = ... + + // could be written as follows with the type alias from above + // def f(x: Int): Executable[Int] = ... + + f(2)(using ec) // explicit argument + f(2) // argument is inferred +``` +Conversely, if the expected type of an expression `E` is a context function type +`(T_1, ..., T_n) ?=> U` and `E` is not already a +context function literal, `E` is converted to a context function literal by rewriting it to +```scala + (x_1: T1, ..., x_n: Tn) ?=> E +``` +where the names `x_1`, ..., `x_n` are arbitrary. This expansion is performed +before the expression `E` is typechecked, which means that `x_1`, ..., `x_n` +are available as givens in `E`. + +Like their types, context function literals are written using `?=>` as the arrow between parameters and results. They differ from normal function literals in that their types are context function types. + +For example, continuing with the previous definitions, +```scala + def g(arg: Executable[Int]) = ...
+ + g(22) // is expanded to g((ev: ExecutionContext) ?=> 22) + + g(f(2)) // is expanded to g((ev: ExecutionContext) ?=> f(2)(using ev)) + + g((ctx: ExecutionContext) ?=> f(3)) // is expanded to g((ctx: ExecutionContext) ?=> f(3)(using ctx)) + g((ctx: ExecutionContext) ?=> f(3)(using ctx)) // is left as it is +``` + +## Example: Builder Pattern + +Context function types have considerable expressive power. For +instance, here is how they can support the "builder pattern", where +the aim is to construct tables like this: +```scala + table { + row { + cell("top left") + cell("top right") + } + row { + cell("bottom left") + cell("bottom right") + } + } +``` +The idea is to define classes for `Table` and `Row` that allow the +addition of elements via `add`: +```scala + class Table: + val rows = new ArrayBuffer[Row] + def add(r: Row): Unit = rows += r + override def toString = rows.mkString("Table(", ", ", ")") + + class Row: + val cells = new ArrayBuffer[Cell] + def add(c: Cell): Unit = cells += c + override def toString = cells.mkString("Row(", ", ", ")") + + case class Cell(elem: String) +``` +Then, the `table`, `row` and `cell` constructor methods can be defined +with context function types as parameters to avoid the plumbing boilerplate +that would otherwise be necessary. 
+```scala + def table(init: Table ?=> Unit) = + given t: Table = Table() + init + t + + def row(init: Row ?=> Unit)(using t: Table) = + given r: Row = Row() + init + t.add(r) + + def cell(str: String)(using r: Row) = + r.add(new Cell(str)) +``` +With that setup, the table construction code above compiles and expands to: +```scala + table { ($t: Table) ?=> + + row { ($r: Row) ?=> + cell("top left")(using $r) + cell("top right")(using $r) + }(using $t) + + row { ($r: Row) ?=> + cell("bottom left")(using $r) + cell("bottom right")(using $r) + }(using $t) + } +``` +## Example: Postconditions + +As a larger example, here is a way to define constructs for checking arbitrary postconditions using an extension method `ensuring` so that the checked result can be referred to simply by `result`. The example combines opaque type aliases, context function types, and extension methods to provide a zero-overhead abstraction. + +```scala +object PostConditions: + opaque type WrappedResult[T] = T + + def result[T](using r: WrappedResult[T]): T = r + + extension [T](x: T) + def ensuring(condition: WrappedResult[T] ?=> Boolean): T = + assert(condition(using x)) + x +end PostConditions +import PostConditions.{ensuring, result} + +val s = List(1, 2, 3).sum.ensuring(result == 6) +``` +**Explanations**: We use a context function type `WrappedResult[T] ?=> Boolean` +as the type of the condition of `ensuring`. An argument to `ensuring` such as +`(result == 6)` will therefore have a given of type `WrappedResult[T]` in +scope to pass along to the `result` method. `WrappedResult` is a fresh type, to make sure +that we do not get unwanted givens in scope (this is good practice in all cases +where context parameters are involved). Since `WrappedResult` is an opaque type alias, its +values need not be boxed, and since `ensuring` is added as an extension method, its argument +does not need boxing either. 
Hence, the implementation of `ensuring` is close in efficiency to the best possible code one could write by hand: + +```scala +val s = + val result = List(1, 2, 3).sum + assert(result == 6) + result +``` +## Reference + +For more information, see the [blog article](https://www.scala-lang.org/blog/2016/12/07/implicit-function-types.html), +(which uses a different syntax that has been superseded). + +[More details](./context-functions-spec.md) diff --git a/docs/_spec/TODOreference/contextual/contextual.md b/docs/_spec/TODOreference/contextual/contextual.md new file mode 100644 index 000000000000..fda63397f8f9 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/contextual.md @@ -0,0 +1,83 @@ +--- +layout: index +title: "Contextual Abstractions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual.html +--- + +## Critique of the Status Quo + +Scala's implicits are its most distinguished feature. They are _the_ fundamental way to abstract over context. They represent a unified paradigm with a great variety of use cases, among them: implementing type classes, establishing context, dependency injection, expressing capabilities, computing new types and proving relationships between them. + +Following Haskell, Scala was the second popular language to have some form of implicits. Other languages have followed suit. E.g [Rust's traits](https://doc.rust-lang.org/rust-by-example/trait.html) or [Swift's protocol extensions](https://docs.swift.org/swift-book/LanguageGuide/Protocols.html#ID521). Design proposals are also on the table for Kotlin as [compile time dependency resolution](https://github.com/Kotlin/KEEP/blob/e863b25f8b3f2e9b9aaac361c6ee52be31453ee0/proposals/compile-time-dependency-resolution.md), for C# as [Shapes and Extensions](https://github.com/dotnet/csharplang/issues/164) +or for F# as [Traits](https://github.com/MattWindsor91/visualfsharp/blob/hackathon-vs/examples/fsconcepts.md). 
Implicits are also a common feature of theorem provers such as [Coq](https://coq.inria.fr/refman/language/extensions/implicit-arguments.html) or [Agda](https://agda.readthedocs.io/en/latest/language/implicit-arguments.html). + +Even though these designs use widely different terminology, they are all variants of the core idea of _term inference_. Given a type, the compiler synthesizes a "canonical" term that has that type. Scala embodies the idea in a purer form than most other languages: An implicit parameter directly leads to an inferred argument term that could also be written down explicitly. By contrast, type class based designs are less direct since they hide term inference behind some form of type classification and do not offer the option of writing the inferred quantities (typically, dictionaries) explicitly. + +Given that term inference is where the industry is heading, and given that Scala has it in a very pure form, how come implicits are not more popular? In fact, it's fair to say that implicits are at the same time Scala's most distinguished and most controversial feature. I believe this is due to a number of aspects that together make implicits harder to learn than necessary and also make it harder to prevent abuses. + +Particular criticisms are: + +1. Being very powerful, implicits are easily over-used and mis-used. This observation holds in almost all cases when we talk about _implicit conversions_, which, even though conceptually different, share the same syntax with other implicit definitions. For instance, regarding the two definitions + + ```scala + implicit def i1(implicit x: T): C[T] = ... + implicit def i2(x: T): C[T] = ... + ``` + + the first of these is a conditional implicit _value_, the second an implicit _conversion_. Conditional implicit values are a cornerstone for expressing type classes, whereas most applications of implicit conversions have turned out to be of dubious value. 
The problem is that many newcomers to the language start with defining implicit conversions since they are easy to understand and seem powerful and convenient. Scala 3 will put under a language flag both definitions and applications of "undisciplined" implicit conversions between types defined elsewhere. This is a useful step to push back against overuse of implicit conversions. But the problem remains that syntactically, conversions and values just look too similar for comfort. + +2. Another widespread abuse is over-reliance on implicit imports. This often leads to inscrutable type errors that go away with the right import incantation, leaving a feeling of frustration. Conversely, it is hard to see what implicits a program uses since implicits can hide anywhere in a long list of imports. + +3. The syntax of implicit definitions is too minimal. It consists of a single modifier, `implicit`, that can be attached to a large number of language constructs. A problem with this for newcomers is that it conveys mechanism instead of intent. For instance, a type class instance is an implicit object or val if unconditional and an implicit def with implicit parameters referring to some class if conditional. This describes precisely what the implicit definitions translate to -- just drop the `implicit` modifier, and that's it! But the cues that define intent are rather indirect and can be easily misread, as demonstrated by the definitions of `i1` and `i2` above. + +4. The syntax of implicit parameters also has shortcomings. While implicit _parameters_ are designated specifically, arguments are not. Passing an argument to an implicit parameter looks like a regular application `f(arg)`. This is problematic because it means there can be confusion regarding what parameter gets instantiated in a call. 
For instance, in + + ```scala + def currentMap(implicit ctx: Context): Map[String, Int] + ``` + + one cannot write `currentMap("abc")` since the string `"abc"` is taken as explicit argument to the implicit `ctx` parameter. One has to write `currentMap.apply("abc")` instead, which is awkward and irregular. For the same reason, a method definition can only have one implicit parameter section and it must always come last. This restriction not only reduces orthogonality, but also prevents some useful program constructs, such as a method with a regular parameter whose type depends on an implicit value. Finally, it's also a bit annoying that implicit parameters must have a name, even though in many cases that name is never referenced. + +5. Implicits pose challenges for tooling. The set of available implicits depends on context, so command completion has to take context into account. This is feasible in an IDE but tools like [Scaladoc](https://docs.scala-lang.org/overviews/scaladoc/overview.html) that are based on static web pages can only provide an approximation. Another problem is that failed implicit searches often give very unspecific error messages, in particular if some deeply recursive implicit search has failed. Note that the Scala 3 compiler has already made a lot of progress in the error diagnostics area. If a recursive search fails some levels down, it shows what was constructed and what is missing. Also, it suggests imports that can bring missing implicits in scope. + +None of the shortcomings is fatal, after all implicits are very widely used, and many libraries and applications rely on them. But together, they make code using implicits a lot more cumbersome and less clear than it could be. + +Historically, many of these shortcomings come from the way implicits were gradually "discovered" in Scala. Scala originally had only implicit conversions with the intended use case of "extending" a class or trait after it was defined, i.e. 
what is expressed by implicit classes in later versions of Scala. Implicit parameters and instance definitions came later in 2006 and we picked similar syntax since it seemed convenient. For the same reason, no effort was made to distinguish implicit imports or arguments from normal ones. + +Existing Scala programmers by and large have gotten used to the status quo and see little need for change. But for newcomers this status quo presents a big hurdle. I believe if we want to overcome that hurdle, we should take a step back and allow ourselves to consider a radically new design. + +## The New Design + +The following pages introduce a redesign of contextual abstractions in Scala. They introduce four fundamental changes: + +1. [Given Instances](./givens.md) are a new way to define basic terms that can be synthesized. They replace implicit definitions. The core principle of the proposal is that, rather than mixing the `implicit` modifier with a large number of features, we have a single way to define terms that can be synthesized for types. + +2. [Using Clauses](./using-clauses.md) are a new syntax for implicit _parameters_ and their _arguments_. It unambiguously aligns parameters and arguments, solving a number of language warts. It also allows us to have several `using` clauses in a definition. + +3. ["Given" Imports](./given-imports.md) are a new class of import selectors that specifically import + givens and nothing else. + +4. [Implicit Conversions](./conversions.md) are now expressed as given instances of a standard `Conversion` class. All other forms of implicit conversions will be phased out. + +This section also contains pages describing other language features that are related to context abstraction. These are: + +- [Context Bounds](./context-bounds.md), which carry over unchanged. +- [Extension Methods](./extension-methods.md) replace implicit classes in a way that integrates better with type classes. 
+- [Implementing Type Classes](./type-classes.md) demonstrates how some common type classes can be implemented using the new constructs. +- [Type Class Derivation](./derivation.md) introduces constructs to automatically derive type class instances for ADTs. +- [Multiversal Equality](./multiversal-equality.md) introduces a special type class to support type safe equality. +- [Context Functions](./context-functions.md) provide a way to abstract over context parameters. +- [By-Name Context Parameters](./by-name-context-parameters.md) are an essential tool to define recursive synthesized values without looping. +- [Relationship with Scala 2 Implicits](./relationship-implicits.md) discusses the relationship between old-style implicits and new-style givens and how to migrate from one to the other. + +Overall, the new design achieves a better separation of term inference from the rest of the language: There is a single way to define givens instead of a multitude of forms all taking an `implicit` modifier. There is a single way to introduce implicit parameters and arguments instead of conflating implicit with normal arguments. There is a separate way to import givens that does not allow them to hide in a sea of normal imports. And there is a single way to define an implicit conversion which is clearly marked as such and does not require special syntax. + +This design thus avoids feature interactions and makes the language more consistent and orthogonal. It will make implicits easier to learn and harder to abuse. It will greatly improve the clarity of the 95% of Scala programs that use implicits. It has thus the potential to fulfil the promise of term inference in a principled way that is also accessible and friendly. + +Could we achieve the same goals by tweaking existing implicits? After having tried for a long time, I believe now that this is impossible. + +- First, some of the problems are clearly syntactic and require different syntax to solve them. 
+- Second, there is the problem of how to migrate. We cannot change the rules in mid-flight. At some stage of language evolution we need to accommodate both the new and the old rules. With a syntax change, this is easy: Introduce the new syntax with new rules, support the old syntax for a while to facilitate cross compilation, deprecate and phase out the old syntax at some later time. Keeping the same syntax does not offer this path, and in fact does not seem to offer any viable path for evolution. +- Third, even if we somehow succeeded with migration, we still have the problem + of how to teach this. We cannot make existing tutorials go away. Almost all existing tutorials start with implicit conversions, which will go away; they use normal imports, which will go away, and they explain calls to methods with implicit parameters by expanding them to plain applications, which will also go away. This means that we'd have + to add modifications and qualifications to all existing literature and courseware, likely causing more confusion with beginners instead of less. By contrast, with a new syntax there is a clear criterion: Any book or courseware that mentions `implicit` is outdated and should be updated. diff --git a/docs/_spec/TODOreference/contextual/conversions.md b/docs/_spec/TODOreference/contextual/conversions.md new file mode 100644 index 000000000000..1ce8d42074e7 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/conversions.md @@ -0,0 +1,76 @@ +--- +layout: doc-page +title: "Implicit Conversions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/conversions.html +--- + +Implicit conversions are defined by given instances of the `scala.Conversion` class.
+This class is defined in package `scala` as follows: +```scala +abstract class Conversion[-T, +U] extends (T => U): + def apply (x: T): U +``` +For example, here is an implicit conversion from `String` to `Token`: +```scala +given Conversion[String, Token] with + def apply(str: String): Token = new KeyWord(str) +``` +Using an alias this can be expressed more concisely as: +```scala +given Conversion[String, Token] = new KeyWord(_) +``` +An implicit conversion is applied automatically by the compiler in three situations: + +1. If an expression `e` has type `T`, and `T` does not conform to the expression's expected type `S`. +2. In a selection `e.m` with `e` of type `T`, but `T` defines no member `m`. +3. In an application `e.m(args)` with `e` of type `T`, if `T` does define + some member(s) named `m`, but none of these members can be applied to the arguments `args`. + +In the first case, the compiler looks for a given `scala.Conversion` instance that maps +an argument of type `T` to type `S`. In the second and third +case, it looks for a given `scala.Conversion` instance that maps an argument of type `T` +to a type that defines a member `m` which can be applied to `args` if present. +If such an instance `C` is found, the expression `e` is replaced by `C.apply(e)`. + +## Examples + +1. The `Predef` package contains "auto-boxing" conversions that map +primitive number types to subclasses of `java.lang.Number`. For instance, the +conversion from `Int` to `java.lang.Integer` can be defined as follows: + ```scala + given int2Integer: Conversion[Int, java.lang.Integer] = + java.lang.Integer.valueOf(_) + ``` + +2. The "magnet" pattern is sometimes used to express many variants of a method. Instead of defining overloaded versions of the method, one can also let the method take one or more arguments of specially defined "magnet" types, into which various argument types can be converted. 
Example: + ```scala + object Completions: + + // The argument "magnet" type + enum CompletionArg: + case Error(s: String) + case Response(f: Future[HttpResponse]) + case Status(code: Future[StatusCode]) + + object CompletionArg: + + // conversions defining the possible arguments to pass to `complete` + // these always come with CompletionArg + // They can be invoked explicitly, e.g. + // + // CompletionArg.fromStatusCode(statusCode) + + given fromString : Conversion[String, CompletionArg] = Error(_) + given fromFuture : Conversion[Future[HttpResponse], CompletionArg] = Response(_) + given fromStatusCode: Conversion[Future[StatusCode], CompletionArg] = Status(_) + end CompletionArg + import CompletionArg.* + + def complete[T](arg: CompletionArg) = arg match + case Error(s) => ... + case Response(f) => ... + case Status(code) => ... + + end Completions + ``` +This setup is more complicated than simple overloading of `complete`, but it can still be useful if normal overloading is not available (as in the case above, since we cannot have two overloaded methods that take `Future[...]` arguments), or if normal overloading would lead to a combinatorial explosion of variants. diff --git a/docs/_spec/TODOreference/contextual/derivation-macro.md b/docs/_spec/TODOreference/contextual/derivation-macro.md new file mode 100644 index 000000000000..be7565616913 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/derivation-macro.md @@ -0,0 +1,205 @@ +--- +layout: doc-page +title: "How to write a type class `derived` method using macros" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/derivation-macro.html +--- + +In the main [derivation](./derivation.md) documentation page, we explained the +details behind `Mirror`s and type class derivation. Here we demonstrate how to +implement a type class `derived` method using macros only. We follow the same +example of deriving `Eq` instances and for simplicity we support a `Product` +type e.g., a case class `Person`. 
The low-level method we will use to implement +the `derived` method exploits quotes, splices of both expressions and types and +the `scala.quoted.Expr.summon` method which is the equivalent of +`summonFrom`. The former is suitable for use in a quote context, used within +macros. + +As in the original code, the type class definition is the same: + +```scala +trait Eq[T]: + def eqv(x: T, y: T): Boolean +``` + +we need to implement a method `Eq.derived` on the companion object of `Eq` that +produces a quoted instance for `Eq[T]`. Here is a possible signature, + +```scala +given derived[T: Type](using Quotes): Expr[Eq[T]] +``` + +and for comparison reasons we give the same signature we had with `inline`: + +```scala +inline given derived[T](using Mirror.Of[T]): Eq[T] = ??? +``` + +Note, that since a type is used in a subsequent stage it will need to be lifted +to a `Type` by using the corresponding context bound. Also, note that we can +summon the quoted `Mirror` inside the body of the `derived` thus we can omit it +from the signature. The body of the `derived` method is shown below: + + +```scala +given derived[T: Type](using Quotes): Expr[Eq[T]] = + import quotes.reflect.* + + val ev: Expr[Mirror.Of[T]] = Expr.summon[Mirror.Of[T]].get + + ev match + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + def eqProductBody(x: Expr[Product], y: Expr[Product])(using Quotes): Expr[Boolean] = { + elemInstances.zipWithIndex.foldLeft(Expr(true)) { + case (acc, ('{ $elem: Eq[t] }, index)) => + val indexExpr = Expr(index) + val e1 = '{ $x.productElement($indexExpr).asInstanceOf[t] } + val e2 = '{ $y.productElement($indexExpr).asInstanceOf[t] } + '{ $acc && $elem.eqv($e1, $e2) } + } + } + '{ eqProduct((x: T, y: T) => ${eqProductBody('x.asExprOf[Product], 'y.asExprOf[Product])}) } + + // case for Mirror.ProductOf[T] + // ... 
+``` + +Note, that in the `inline` case we can merely write +`summonAll[m.MirroredElemTypes]` inside the inline method but here, since +`Expr.summon` is required, we can extract the element types in a macro fashion. +Being inside a macro, our first reaction would be to write the code below. Since +the path inside the type argument is not stable this cannot be used: + +```scala +'{ + summonAll[$m.MirroredElemTypes] +} +``` + +Instead we extract the tuple-type for element types using pattern matching over +quotes and more specifically of the refined type: + +```scala + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => ... +``` + +Shown below is the implementation of `summonAll` as a macro. We assume that +given instances for our primitive types exist. + +```scala +def summonAll[T: Type](using Quotes): List[Expr[Eq[_]]] = + Type.of[T] match + case '[String *: tpes] => '{ summon[Eq[String]] } :: summonAll[tpes] + case '[Int *: tpes] => '{ summon[Eq[Int]] } :: summonAll[tpes] + case '[tpe *: tpes] => derived[tpe] :: summonAll[tpes] + case '[EmptyTuple] => Nil +``` + +One additional difference with the body of `derived` here as opposed to the one +with `inline` is that with macros we need to synthesize the body of the code during the +macro-expansion time. That is the rationale behind the `eqProductBody` function. +Assuming that we calculate the equality of two `Person`s defined with a case +class that holds a name of type [`String`](https://scala-lang.org/api/3.x/scala/Predef$.html#String-0) +and an age of type `Int`, the equality check we want to generate is the following: + +```scala + true + && Eq[String].eqv(x.productElement(0),y.productElement(0)) + && Eq[Int].eqv(x.productElement(1), y.productElement(1)) +``` + +## Calling the derived method inside the macro + +Following the rules in [Macros](../metaprogramming/metaprogramming.md) we create two methods. +One that hosts the top-level splice `eqv` and one that is the implementation. 
+Alternatively and what is shown below is that we can call the `eqv` method +directly. The `eqGen` can trigger the derivation. + +```scala +extension [T](inline x: T) + inline def === (inline y: T)(using eq: Eq[T]): Boolean = eq.eqv(x, y) + +inline given eqGen[T]: Eq[T] = ${ Eq.derived[T] } +``` + +Note, that we use inline method syntax and we can compare instance such as +`Sm(Person("Test", 23)) === Sm(Person("Test", 24))` for e.g., the following two +types: + +```scala +case class Person(name: String, age: Int) + +enum Opt[+T]: + case Sm(t: T) + case Nn +``` + +The full code is shown below: + +```scala +import scala.deriving.* +import scala.quoted.* + + +trait Eq[T]: + def eqv(x: T, y: T): Boolean + +object Eq: + given Eq[String] with + def eqv(x: String, y: String) = x == y + + given Eq[Int] with + def eqv(x: Int, y: Int) = x == y + + def eqProduct[T](body: (T, T) => Boolean): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = body(x, y) + + def eqSum[T](body: (T, T) => Boolean): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = body(x, y) + + def summonAll[T: Type](using Quotes): List[Expr[Eq[_]]] = + Type.of[T] match + case '[String *: tpes] => '{ summon[Eq[String]] } :: summonAll[tpes] + case '[Int *: tpes] => '{ summon[Eq[Int]] } :: summonAll[tpes] + case '[tpe *: tpes] => derived[tpe] :: summonAll[tpes] + case '[EmptyTuple] => Nil + + given derived[T: Type](using q: Quotes): Expr[Eq[T]] = + import quotes.reflect.* + + val ev: Expr[Mirror.Of[T]] = Expr.summon[Mirror.Of[T]].get + + ev match + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + val eqProductBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => + elemInstances.zipWithIndex.foldLeft(Expr(true: Boolean)) { + case (acc, (elem, index)) => + val e1 = '{$x.asInstanceOf[Product].productElement(${Expr(index)})} + val e2 = '{$y.asInstanceOf[Product].productElement(${Expr(index)})} + + '{ $acc && 
$elem.asInstanceOf[Eq[Any]].eqv($e1, $e2) } + } + '{ eqProduct((x: T, y: T) => ${eqProductBody('x, 'y)}) } + + case '{ $m: Mirror.SumOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + val eqSumBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => + val ordx = '{ $m.ordinal($x) } + val ordy = '{ $m.ordinal($y) } + + val elements = Expr.ofList(elemInstances) + '{ $ordx == $ordy && $elements($ordx).asInstanceOf[Eq[Any]].eqv($x, $y) } + + '{ eqSum((x: T, y: T) => ${eqSumBody('x, 'y)}) } + end derived +end Eq + +object Macro3: + extension [T](inline x: T) + inline def === (inline y: T)(using eq: Eq[T]): Boolean = eq.eqv(x, y) + + inline given eqGen[T]: Eq[T] = ${ Eq.derived[T] } +``` diff --git a/docs/_spec/TODOreference/contextual/derivation.md b/docs/_spec/TODOreference/contextual/derivation.md new file mode 100644 index 000000000000..bad47dcb0096 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/derivation.md @@ -0,0 +1,425 @@ +--- +layout: doc-page +title: "Type Class Derivation" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/derivation.html +--- + +Type class derivation is a way to automatically generate given instances for type classes which satisfy some simple +conditions. A type class in this sense is any trait or class with a type parameter determining the type being operated +on. Common examples are `Eq`, `Ordering`, or `Show`. 
For example, given the following `Tree` algebraic data type +(ADT), + +```scala +enum Tree[T] derives Eq, Ordering, Show: + case Branch(left: Tree[T], right: Tree[T]) + case Leaf(elem: T) +``` + +The `derives` clause generates the following given instances for the `Eq`, `Ordering` and `Show` type classes in the +companion object of `Tree`, + +```scala +given [T: Eq] : Eq[Tree[T]] = Eq.derived +given [T: Ordering] : Ordering[Tree[T]] = Ordering.derived +given [T: Show] : Show[Tree[T]] = Show.derived +``` + +We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Show` instances are _derived instances_. + +## Types supporting `derives` clauses + +All data types can have a `derives` clause. This document focuses primarily on data types which also have a given instance +of the `Mirror` type class available. + +`Mirror` type class instances provide information at the type level about the components and labelling of the type. +They also provide minimal term level infrastructure to allow higher level libraries to provide comprehensive +derivation support. + +Instances of the `Mirror` type class are generated automatically by the compiler +unconditionally for: +- enums and enum cases, +- case objects. + +Instances for `Mirror` are also generated conditionally for: +- case classes where the constructor is visible at the callsite (always true if the companion is not a case object) +- sealed classes and sealed traits where: + - there exists at least one child case, + - each child case is reachable from the parent's definition, + - if the sealed trait/class has no companion, then each child case is reachable from the callsite through the prefix of the type being mirrored, + - and where the compiler can generate a `Mirror` type class instance for each child case. 
+ + +The `Mirror` type class definition is as follows: + +```scala +sealed trait Mirror: + + /** the type being mirrored */ + type MirroredType + + /** the type of the elements of the mirrored type */ + type MirroredElemTypes + + /** The mirrored *-type */ + type MirroredMonoType + + /** The name of the type */ + type MirroredLabel <: String + + /** The names of the elements of the type */ + type MirroredElemLabels <: Tuple + +object Mirror: + + /** The Mirror for a product type */ + trait Product extends Mirror: + + /** Create a new instance of type `T` with elements + * taken from product `p`. + */ + def fromProduct(p: scala.Product): MirroredMonoType + + trait Sum extends Mirror: + + /** The ordinal number of the case class of `x`. + * For enums, `ordinal(x) == x.ordinal` + */ + def ordinal(x: MirroredMonoType): Int + +end Mirror +``` + +Product types (i.e. case classes and objects, and enum cases) have mirrors which are subtypes of `Mirror.Product`. Sum +types (i.e. sealed class or traits with product children, and enums) have mirrors which are subtypes of `Mirror.Sum`. + +For the `Tree` ADT from above the following `Mirror` instances will be automatically provided by the compiler, + +```scala +// Mirror for Tree +new Mirror.Sum: + type MirroredType = Tree + type MirroredElemTypes[T] = (Branch[T], Leaf[T]) + type MirroredMonoType = Tree[_] + type MirroredLabel = "Tree" + type MirroredElemLabels = ("Branch", "Leaf") + + def ordinal(x: MirroredMonoType): Int = x match + case _: Branch[_] => 0 + case _: Leaf[_] => 1 + +// Mirror for Branch +new Mirror.Product: + type MirroredType = Branch + type MirroredElemTypes[T] = (Tree[T], Tree[T]) + type MirroredMonoType = Branch[_] + type MirroredLabel = "Branch" + type MirroredElemLabels = ("left", "right") + + def fromProduct(p: Product): MirroredMonoType = + new Branch(...) 
+ +// Mirror for Leaf +new Mirror.Product: + type MirroredType = Leaf + type MirroredElemTypes[T] = Tuple1[T] + type MirroredMonoType = Leaf[_] + type MirroredLabel = "Leaf" + type MirroredElemLabels = Tuple1["elem"] + + def fromProduct(p: Product): MirroredMonoType = + new Leaf(...) +``` + +If a Mirror cannot be generated automatically for a given type, an error will appear explaining why it is neither a supported +sum type nor a product type. For example, if `A` is a trait that is not sealed, + +``` +No given instance of type deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[A]: + * trait A is not a generic product because it is not a case class + * trait A is not a generic sum because it is not a sealed trait +``` + + +Note the following properties of `Mirror` types, + ++ Properties are encoded using types rather than terms. This means that they have no runtime footprint unless used and + also that they are a compile time feature for use with Scala 3's metaprogramming facilities. ++ There is no restriction against the mirrored type being a local or inner class. ++ The kinds of `MirroredType` and `MirroredElemTypes` match the kind of the data type the mirror is an instance for. + This allows `Mirror`s to support ADTs of all kinds. ++ There is no distinct representation type for sums or products (ie. there is no `HList` or `Coproduct` type as in + Scala 2 versions of Shapeless). Instead the collection of child types of a data type is represented by an ordinary, + possibly parameterized, tuple type. Scala 3's metaprogramming facilities can be used to work with these tuple types + as-is, and higher level libraries can be built on top of them. ++ For both product and sum types, the elements of `MirroredElemTypes` are arranged in definition order (i.e. 
`Branch[T]` + precedes `Leaf[T]` in `MirroredElemTypes` for `Tree` because `Branch` is defined before `Leaf` in the source file). + This means that `Mirror.Sum` differs in this respect from Shapeless's generic representation for ADTs in Scala 2, + where the constructors are ordered alphabetically by name. ++ The methods `ordinal` and `fromProduct` are defined in terms of `MirroredMonoType` which is the type of kind-`*` + which is obtained from `MirroredType` by wildcarding its type parameters. + +## Type classes supporting automatic deriving + +A trait or class can appear in a `derives` clause if its companion object defines a method named `derived`. The +signature and implementation of a `derived` method for a type class `TC[_]` are arbitrary but it is typically of the +following form, + +```scala +import scala.deriving.Mirror + +inline def derived[T](using Mirror.Of[T]): TC[T] = ... +``` + +That is, the `derived` method takes a context parameter of (some subtype of) type `Mirror` which defines the shape of +the deriving type `T`, and computes the type class implementation according to that shape. This is all that the +provider of an ADT with a `derives` clause has to know about the derivation of a type class instance. + +Note that `derived` methods may have context `Mirror` parameters indirectly (e.g. by having a context argument which in turn +has a context `Mirror` parameter, or not at all (e.g. they might use some completely different user-provided mechanism, for +instance using Scala 3 macros or runtime reflection). We expect that (direct or indirect) `Mirror` based implementations +will be the most common and that is what this document emphasises. + +Type class authors will most likely use higher level derivation or generic programming libraries to implement +`derived` methods. An example of how a `derived` method might be implemented using _only_ the low level facilities +described above and Scala 3's general metaprogramming features is provided below. 
It is not anticipated that type class +authors would normally implement a `derived` method in this way, however this walkthrough can be taken as a guide for +authors of the higher level derivation libraries that we expect typical type class authors will use (for a fully +worked out example of such a library, see [Shapeless 3](https://github.com/milessabin/shapeless/tree/shapeless-3)). + +## How to write a type class `derived` method using low level mechanisms + +The low-level method we will use to implement a type class `derived` method in this example exploits three new +type-level constructs in Scala 3: inline methods, inline matches, and implicit searches via `summonInline` or `summonFrom`. Given this definition of the +`Eq` type class, + +```scala +trait Eq[T]: + def eqv(x: T, y: T): Boolean +``` + +we need to implement a method `Eq.derived` on the companion object of `Eq` that produces a given instance for `Eq[T]` given +a `Mirror[T]`. Here is a possible implementation, + +```scala +import scala.deriving.Mirror + +inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + val elemInstances = summonAll[m.MirroredElemTypes] // (1) + inline m match // (2) + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) +``` + +Note that `derived` is defined as an `inline` given. This means that the method will be expanded at +call sites (for instance the compiler generated instance definitions in the companion objects of ADTs which have a +`derived Eq` clause), and also that it can be used recursively if necessary, to compute instances for children. + +The body of this method (1) first materializes the `Eq` instances for all the child types of type the instance is +being derived for. This is either all the branches of a sum type or all the fields of a product type. 
The +implementation of `summonAll` is `inline` and uses Scala 3's `summonInline` construct to collect the instances as a +`List`, + +```scala +inline def summonAll[T <: Tuple]: List[Eq[_]] = + inline erasedValue[T] match + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] +``` + +with the instances for children in hand the `derived` method uses an `inline match` to dispatch to methods which can +construct instances for either sums or products (2). Note that because `derived` is `inline` the match will be +resolved at compile-time and only the left-hand side of the matching case will be inlined into the generated code with +types refined as revealed by the match. + +In the sum case, `eqSum`, we use the runtime `ordinal` values of the arguments to `eqv` to first check if the two +values are of the same subtype of the ADT (3) and then, if they are, to further test for equality based on the `Eq` +instance for the appropriate ADT subtype using the auxiliary method `check` (4). 
+ +```scala +import scala.deriving.Mirror + +def eqSum[T](s: Mirror.SumOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + val ordx = s.ordinal(x) // (3) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) // (4) +``` + +In the product case, `eqProduct` we test the runtime values of the arguments to `eqv` for equality as products based +on the `Eq` instances for the fields of the data type (5), + +```scala +import scala.deriving.Mirror + +def eqProduct[T](p: Mirror.ProductOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { // (5) + case ((x, y), elem) => check(elem)(x, y) + } +``` + +Pulling this all together we have the following complete implementation, + +```scala +import scala.deriving.* +import scala.compiletime.{erasedValue, summonInline} + +inline def summonAll[T <: Tuple]: List[Eq[_]] = + inline erasedValue[T] match + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] + +trait Eq[T]: + def eqv(x: T, y: T): Boolean + +object Eq: + given Eq[Int] with + def eqv(x: Int, y: Int) = x == y + + def check(elem: Eq[_])(x: Any, y: Any): Boolean = + elem.asInstanceOf[Eq[Any]].eqv(x, y) + + def iterator[T](p: T) = p.asInstanceOf[Product].productIterator + + def eqSum[T](s: Mirror.SumOf[T], elems: => List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + val ordx = s.ordinal(x) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) + + def eqProduct[T](p: Mirror.ProductOf[T], elems: => List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { + case ((x, y), elem) => check(elem)(x, y) + } + + inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + lazy val elemInstances = summonAll[m.MirroredElemTypes] + inline m match + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) 
+end Eq +``` + +we can test this relative to a simple ADT like so, + +```scala +enum Opt[+T] derives Eq: + case Sm(t: T) + case Nn + +@main def test(): Unit = + import Opt.* + val eqoi = summon[Eq[Opt[Int]]] + assert(eqoi.eqv(Sm(23), Sm(23))) + assert(!eqoi.eqv(Sm(23), Sm(13))) + assert(!eqoi.eqv(Sm(23), Nn)) +``` + +In this case the code that is generated by the inline expansion for the derived `Eq` instance for `Opt` looks like the +following, after a little polishing, + +```scala +given derived$Eq[T](using eqT: Eq[T]): Eq[Opt[T]] = + eqSum( + summon[Mirror[Opt[T]]], + List( + eqProduct(summon[Mirror[Sm[T]]], List(summon[Eq[T]])), + eqProduct(summon[Mirror[Nn.type]], Nil) + ) + ) +``` + +Alternative approaches can be taken to the way that `derived` methods can be defined. For example, more aggressively +inlined variants using Scala 3 macros, whilst being more involved for type class authors to write than the example +above, can produce code for type classes like `Eq` which eliminate all the abstraction artefacts (eg. the `Lists` of +child instances in the above) and generate code which is indistinguishable from what a programmer might write by hand. +As a third example, using a higher level library such as Shapeless the type class author could define an equivalent +`derived` method as, + +```scala +given eqSum[A](using inst: => K0.CoproductInstances[Eq, A]): Eq[A] with + def eqv(x: A, y: A): Boolean = inst.fold2(x, y)(false)( + [t] => (eqt: Eq[t], t0: t, t1: t) => eqt.eqv(t0, t1) + ) + +given eqProduct[A](using inst: K0.ProductInstances[Eq, A]): Eq[A] with + def eqv(x: A, y: A): Boolean = inst.foldLeft2(x, y)(true: Boolean)( + [t] => (acc: Boolean, eqt: Eq[t], t0: t, t1: t) => + Complete(!eqt.eqv(t0, t1))(false)(true) + ) + +inline def derived[A](using gen: K0.Generic[A]): Eq[A] = + gen.derive(eqProduct, eqSum) +``` + +The framework described here enables all three of these approaches without mandating any of them. 
+ +For a brief discussion on how to use macros to write a type class `derived` +method please read more at [How to write a type class `derived` method using macros](./derivation-macro.md). + +## Deriving instances elsewhere + +Sometimes one would like to derive a type class instance for an ADT after the ADT is defined, without being able to +change the code of the ADT itself. To do this, simply define an instance using the `derived` method of the type class +as right-hand side. E.g, to implement `Ordering` for `Option` define, + +```scala +given [T: Ordering]: Ordering[Option[T]] = Ordering.derived +``` + +Assuming the `Ordering.derived` method has a context parameter of type `Mirror[T]` it will be satisfied by the +compiler generated `Mirror` instance for `Option` and the derivation of the instance will be expanded on the right +hand side of this definition in the same way as an instance defined in ADT companion objects. + +## Syntax + +``` +Template ::= InheritClauses [TemplateBody] +EnumDef ::= id ClassConstr InheritClauses EnumBody +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp {‘with’ ConstrApp} + | ConstrApp {‘,’ ConstrApp} +``` + +**Note:** To align `extends` clauses and `derives` clauses, Scala 3 also allows multiple +extended types to be separated by commas. So the following is now legal: + +```scala +class A extends B, C { ... } +``` + +It is equivalent to the old form + +```scala +class A extends B with C { ... } +``` + +## Discussion + +This type class derivation framework is intentionally very small and low-level. There are essentially two pieces of +infrastructure in compiler-generated `Mirror` instances, + ++ type members encoding properties of the mirrored types. ++ a minimal value level mechanism for working generically with terms of the mirrored types. 
+ +The `Mirror` infrastructure can be seen as an extension of the existing `Product` infrastructure for case classes: +typically `Mirror` types will be implemented by the ADT's companion object, hence the type members and the `ordinal` or +`fromProduct` methods will be members of that object. The primary motivation for this design decision, and the +decision to encode properties via types rather than terms was to keep the bytecode and runtime footprint of the +feature small enough to make it possible to provide `Mirror` instances _unconditionally_. + +Whilst `Mirrors` encode properties precisely via type members, the value level `ordinal` and `fromProduct` are +somewhat weakly typed (because they are defined in terms of `MirroredMonoType`) just like the members of `Product`. +This means that code for generic type classes has to ensure that type exploration and value selection proceed in +lockstep and it has to assert this conformance in some places using casts. If generic type classes are correctly +written these casts will never fail. + +As mentioned, however, the compiler-provided mechanism is intentionally very low level and it is anticipated that +higher level type class derivation and generic programming libraries will build on this and Scala 3's other +metaprogramming facilities to hide these low-level details from type class authors and general users. Type class +derivation in the style of both Shapeless and Magnolia is possible (a prototype of Shapeless 3, which combines +aspects of both Shapeless 2 and Magnolia has been developed alongside this language feature) as is a more aggressively +inlined style, supported by Scala 3's new quote/splice macro and inlining facilities. 
diff --git a/docs/_spec/TODOreference/contextual/extension-methods.md b/docs/_spec/TODOreference/contextual/extension-methods.md new file mode 100644 index 000000000000..d23cadf513d7 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/extension-methods.md @@ -0,0 +1,306 @@ +--- +layout: doc-page +title: "Extension Methods" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/extension-methods.html +--- + +Extension methods allow one to add methods to a type after the type is defined. Example: + +```scala +case class Circle(x: Double, y: Double, radius: Double) + +extension (c: Circle) + def circumference: Double = c.radius * math.Pi * 2 +``` + +Like regular methods, extension methods can be invoked with infix `.`: + +```scala +val circle = Circle(0, 0, 1) +circle.circumference +``` + +## Translation of Extension Methods + +An extension method translates to a specially labelled method that takes the leading parameter section as its first argument list. The label, expressed +as `` here, is compiler-internal. So, the definition of `circumference` above translates to the following method, and can also be invoked as such: + +``` + def circumference(c: Circle): Double = c.radius * math.Pi * 2 + +assert(circle.circumference == circumference(circle)) +``` + +## Operators + +The extension method syntax can also be used to define operators. Examples: + +```scala +extension (x: String) + def < (y: String): Boolean = ... +extension (x: Elem) + def +: (xs: Seq[Elem]): Seq[Elem] = ... +extension (x: Number) + infix def min (y: Number): Number = ... + +"ab" < "c" +1 +: List(2, 3) +x min 3 +``` + +The three definitions above translate to + +``` + def < (x: String)(y: String): Boolean = ... + def +: (xs: Seq[Elem])(x: Elem): Seq[Elem] = ... + infix def min(x: Number)(y: Number): Number = ... +``` + +Note the swap of the two parameters `x` and `xs` when translating +the right-associative operator `+:` to an extension method. 
This is analogous +to the implementation of right binding operators as normal methods. The Scala +compiler preprocesses an infix operation `x +: xs` to `xs.+:(x)`, so the extension +method ends up being applied to the sequence as first argument (in other words, the +two swaps cancel each other out). See [here for details](./right-associative-extension-methods.md). + +## Generic Extensions + +It is also possible to extend generic types by adding type parameters to an extension. For instance: + +```scala +extension [T](xs: List[T]) + def second = xs.tail.head + +extension [T: Numeric](x: T) + def + (y: T): T = summon[Numeric[T]].plus(x, y) +``` + +Type parameters on extensions can also be combined with type parameters on the methods +themselves: + +```scala +extension [T](xs: List[T]) + def sumBy[U: Numeric](f: T => U): U = ... +``` + +Type arguments matching method type parameters are passed as usual: + +```scala +List("a", "bb", "ccc").sumBy[Int](_.length) +``` + +By contrast, type arguments matching type parameters following `extension` can be passed +only if the method is referenced as a non-extension method: + +```scala +sumBy[String](List("a", "bb", "ccc"))(_.length) +``` + +Or, when passing both type arguments: + +```scala +sumBy[String](List("a", "bb", "ccc"))[Int](_.length) +``` + +Extensions can also take using clauses. For instance, the `+` extension above could equivalently be written with a using clause: + +```scala +extension [T](x: T)(using n: Numeric[T]) + def + (y: T): T = n.plus(x, y) +``` + +## Collective Extensions + +Sometimes, one wants to define several extension methods that share the same +left-hand parameter type. In this case one can "pull out" the common parameters into +a single extension and enclose all methods in braces or an indented region. 
+Example: + +```scala +extension (ss: Seq[String]) + + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + + def longestString: String = longestStrings.head +``` + +The same can be written with braces as follows (note that indented regions can still be used inside braces): + +```scala +extension (ss: Seq[String]) { + + def longestStrings: Seq[String] = { + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + } + + def longestString: String = longestStrings.head +} +``` + +Note the right-hand side of `longestString`: it calls `longestStrings` directly, implicitly +assuming the common extended value `ss` as receiver. + +Collective extensions like these are a shorthand for individual extensions +where each method is defined separately. For instance, the first extension above expands to: + +```scala +extension (ss: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + +extension (ss: Seq[String]) + def longestString: String = ss.longestStrings.head +``` + +Collective extensions also can take type parameters and have using clauses. Example: + +```scala +extension [T](xs: List[T])(using Ordering[T]) + def smallest(n: Int): List[T] = xs.sorted.take(n) + def smallestIndices(n: Int): List[Int] = + val limit = smallest(n).max + xs.zipWithIndex.collect { case (x, i) if x <= limit => i } +``` + +## Translation of Calls to Extension Methods + +To convert a reference to an extension method, the compiler has to know about the extension +method. We say in this case that the extension method is _applicable_ at the point of reference. +There are four possible ways for an extension method to be applicable: + + 1. The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. + 2. 
The extension method is a member of some given + instance that is visible at the point of the reference. + 3. The reference is of the form `r.m` and the extension method + is defined in the implicit scope of the type of `r`. + 4. The reference is of the form `r.m` and the extension method + is defined in some given instance in the implicit scope of the type of `r`. + +Here is an example for the first rule: + +```scala +trait IntOps: + extension (i: Int) def isZero: Boolean = i == 0 + + extension (i: Int) def safeMod(x: Int): Option[Int] = + // extension method defined in same scope IntOps + if x.isZero then None + else Some(i % x) + +object IntOpsEx extends IntOps: + extension (i: Int) def safeDiv(x: Int): Option[Int] = + // extension method brought into scope via inheritance from IntOps + if x.isZero then None + else Some(i / x) + +trait SafeDiv: + import IntOpsEx.* // brings safeDiv and safeMod into scope + + extension (i: Int) def divide(d: Int): Option[(Int, Int)] = + // extension methods imported and thus in scope + (i.safeDiv(d), i.safeMod(d)) match + case (Some(d), Some(r)) => Some((d, r)) + case _ => None +``` + +By the second rule, an extension method can be made available by defining a given instance containing it, like this: + +```scala +given ops1: IntOps() // brings safeMod into scope + +1.safeMod(2) +``` + +By the third and fourth rule, an extension method is available if it is in the implicit scope of the receiver type or in a given instance in that scope. Example: + +```scala +class List[T]: + ... +object List: + ... + extension [T](xs: List[List[T]]) + def flatten: List[T] = xs.foldLeft(List.empty[T])(_ ++ _) + + given [T: Ordering]: Ordering[List[T]] with + extension (xs: List[T]) + def < (ys: List[T]): Boolean = ... 
+end List + +// extension method available since it is in the implicit scope +// of List[List[Int]] +List(List(1, 2), List(3, 4)).flatten + +// extension method available since it is in the given Ordering[List[T]], +// which is itself in the implicit scope of List[Int] +List(1, 2) < List(3) +``` + +The precise rules for resolving a selection to an extension method are as follows. + +Assume a selection `e.m[Ts]` where `m` is not a member of `e`, where the type arguments `[Ts]` are optional, and where `T` is the expected type. +The following two rewritings are tried in order: + + 1. The selection is rewritten to `m[Ts](e)`. + 2. If the first rewriting does not typecheck with expected type `T`, + and there is an extension method `m` in some eligible object `o`, the selection is rewritten to `o.m[Ts](e)`. An object `o` is _eligible_ if + + - `o` forms part of the implicit scope of `T`, or + - `o` is a given instance that is visible at the point of the application, or + - `o` is a given instance in the implicit scope of `T`. + + This second rewriting is attempted at the time where the compiler also tries an implicit conversion + from `T` to a type containing `m`. If there is more than one way of rewriting, an ambiguity error results. + +An extension method can also be referenced using a simple identifier without a preceding expression. If an identifier `g` appears in the body of an extension method `f` and refers to an extension method `g` that is defined in the same collective extension + +```scala +extension (x: T) + def f ... = ... g ... + def g ... +``` + +the identifier is rewritten to `x.g`. This is also the case if `f` and `g` are the same method. Example: + +```scala +extension (s: String) + def position(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then position(ch, n + 1) + else n +``` + +The recursive call `position(ch, n + 1)` expands to `s.position(ch, n + 1)` in this case. 
The whole extension method rewrites to + +```scala +def position(s: String)(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then position(s)(ch, n + 1) + else n +``` + +## Syntax + +Here are the syntax changes for extension methods and collective extensions relative +to the [current syntax](../syntax.md). + +``` +BlockStat ::= ... | Extension +TemplateStat ::= ... | Extension +TopStat ::= ... | Extension +Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} + ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods +ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> +ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef +``` + +In the above the notation `<<< ts >>>` in the production rule `ExtMethods` is defined as follows : + +``` +<<< ts >>> ::= ‘{’ ts ‘}’ | indent ts outdent +``` + +`extension` is a soft keyword. It is recognized as a keyword only if it appears +at the start of a statement and is followed by `[` or `(`. In all other cases +it is treated as an identifier. diff --git a/docs/_spec/TODOreference/contextual/given-imports.md b/docs/_spec/TODOreference/contextual/given-imports.md new file mode 100644 index 000000000000..6a55368979b1 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/given-imports.md @@ -0,0 +1,117 @@ +--- +layout: doc-page +title: "Importing Givens" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/given-imports.html +--- + +A special form of import wildcard selector is used to import given instances. Example: + +```scala +object A: + class TC + given tc: TC = ??? + def f(using TC) = ??? + +object B: + import A.* + import A.given + ... +``` + +In the code above, the `import A.*` clause in object `B` imports all members +of `A` _except_ the given instance `tc`. Conversely, the second import `import A.given` will import _only_ that given instance. +The two import clauses can also be merged into one: + +```scala +object B: + import A.{given, *} + ... 
+``` + +Generally, a normal wildcard selector `*` brings all definitions other than givens or extensions into scope +whereas a `given` selector brings all givens (including those resulting from extensions) into scope. + +There are two main benefits arising from these rules: + +- It is made clearer where givens in scope are coming from. + In particular, it is not possible to hide imported givens in a long list of regular wildcard imports. +- It enables importing all givens + without importing anything else. This is particularly important since givens + can be anonymous, so the usual recourse of using named imports is not + practical. + +## Importing By Type + +Since givens can be anonymous it is not always practical to import them by their name, and wildcard imports are typically used instead. By-type imports provide a more specific alternative to wildcard imports, which makes it clearer what is imported. Example: + +```scala +import A.given TC +``` + +This imports any given in `A` that has a type which conforms to `TC`. Importing givens of several types `T1,...,Tn` +is expressed by multiple `given` selectors. + +```scala +import A.{given T1, ..., given Tn} +``` + +Importing all given instances of a parameterized type is expressed by wildcard arguments. +For instance, assuming the object + +```scala +object Instances: + given intOrd: Ordering[Int] = ... + given listOrd[T: Ordering]: Ordering[List[T]] = ... + given ec: ExecutionContext = ... + given im: Monoid[Int] = ... +``` + +the import clause + +```scala +import Instances.{given Ordering[?], given ExecutionContext} +``` + +would import the `intOrd`, `listOrd`, and `ec` instances but leave out the `im` instance, since it fits none of the specified bounds. + +By-type imports can be mixed with by-name imports. If both are present in an import clause, by-type imports come last. 
For instance, the import clause + +```scala +import Instances.{im, given Ordering[?]} +``` + +would import `im`, `intOrd`, and `listOrd` but leave out `ec`. + +## Migration + +The rules for imports stated above have the consequence that a library +would have to migrate in lockstep with all its users from old style implicits and +normal imports to givens and given imports. + +The following modifications avoid this hurdle to migration. + + 1. A `given` import selector also brings old style implicits into scope. So, in Scala 3.0 + an old-style implicit definition can be brought into scope either by a `*` or a `given` wildcard selector. + + 2. In Scala 3.1, old-style implicits accessed through a `*` wildcard import will give a deprecation warning. + + 3. In some version after 3.1, old-style implicits accessed through a `*` wildcard import will give a compiler error. + +These rules mean that library users can use `given` selectors to access old-style implicits in Scala 3.0, +and will be gently nudged and then forced to do so in later versions. Libraries can then switch to +given instances once their user base has migrated. 
+ +## Syntax + +``` +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec +ImportSpec ::= NamedSelector + | WildCardSelector + | ‘{’ ImportSelectors ‘}’ +NamedSelector ::= id [‘as’ (id | ‘_’)] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] +ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +``` diff --git a/docs/_spec/TODOreference/contextual/givens.md b/docs/_spec/TODOreference/contextual/givens.md new file mode 100644 index 000000000000..411d50ba63ea --- /dev/null +++ b/docs/_spec/TODOreference/contextual/givens.md @@ -0,0 +1,193 @@ +--- +layout: doc-page +title: "Given Instances" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/givens.html +--- + +Given instances (or, simply, "givens") define "canonical" values of certain types +that serve for synthesizing arguments to [context parameters](./using-clauses.md). Example: + +```scala +trait Ord[T]: + def compare(x: T, y: T): Int + extension (x: T) def < (y: T) = compare(x, y) < 0 + extension (x: T) def > (y: T) = compare(x, y) > 0 + +given intOrd: Ord[Int] with + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given listOrd[T](using ord: Ord[T]): Ord[List[T]] with + + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if fst != 0 then fst else compare(xs1, ys1) + +``` + +This code defines a trait `Ord` with two given instances. `intOrd` defines +a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` +themselves. The `using` clause in `listOrd` defines a condition: There must be a +given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. 
+Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). + +## Anonymous Givens + +The name of a given can be left out. So the definitions +of the last section can also be expressed like this: + +```scala +given Ord[Int] with + ... +given [T](using Ord[T]): Ord[List[T]] with + ... +``` + +If the name of a given is missing, the compiler will synthesize a name from +the implemented type(s). + +**Note** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: + +```scala +given_Ord_Int +given_Ord_List +``` + +The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between +given instances of types that are "too similar". To avoid conflicts one can +use named instances. + +**Note** To ensure robust binary compatibility, publicly available libraries should prefer named instances. + +## Alias Givens + +An alias can be used to define a given instance that is equal to some expression. Example: + +```scala +given global: ExecutionContext = ForkJoinPool() +``` + +This creates a given `global` of type `ExecutionContext` that resolves to the right +hand side `ForkJoinPool()`. +The first time `global` is accessed, a new `ForkJoinPool` is created, which is then +returned for this and all subsequent accesses to `global`. This operation is thread-safe. + +Alias givens can be anonymous as well, e.g. + +```scala +given Position = enclosingTree.position +given (using config: Config): Factory = MemoizingFactory(config) +``` + +An alias given can have type parameters and context parameters just like any other given, +but it can only implement a single type. + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. 
+Example: + +```scala +transparent inline given mkAnnotations[A, T]: Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` + +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +Given instances can have the `inline` but not `transparent` modifiers as their type is already known from the signature. +Example: + +```scala +trait Show[T] { + inline def show(x: T): String +} + +inline given Show[Foo] with { + /*transparent*/ inline def show(x: Foo): String = ${ ... } +} + +def app = + // inlines `show` method call and removes the call to `given Show[Foo]` + summon[Show[Foo]].show(foo) +``` +Note that the inline methods within the given instances may be `transparent`. + +The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. +This is used to help dead code elimination of the given instances that are not used after inlining. + + +## Pattern-Bound Given Instances + +Given instances can also appear in patterns. Example: + +```scala +for given Context <- applicationContexts do + +pair match + case (ctx @ given Context, y) => ... +``` + +In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` +instance named `ctx` is established by matching against the first half of the `pair` selector. + +In each case, a pattern-bound given instance consists of `given` and a type `T`. The pattern matches exactly the same selectors as the type ascription pattern `_: T`. 
+ +## Negated Givens + +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement the analogue of a "negated" search in implicit resolution, +where a query Q1 fails if some other query Q2 succeeds and Q1 succeeds if Q2 fails. With the new cleaned up behavior these techniques no longer work. +But the new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) now implements negation directly. + +For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit +search for `Q` fails, for example: + +```scala +import scala.util.NotGiven + +trait Tagged[A] + +case class Foo[A](value: Boolean) +object Foo: + given fooTagged[A](using Tagged[A]): Foo[A] = Foo(true) + given fooNotTagged[A](using NotGiven[Tagged[A]]): Foo[A] = Foo(false) + +@main def test(): Unit = + given Tagged[Int]() + assert(summon[Foo[Int]].value) // fooTagged is found + assert(!summon[Foo[String]].value) // fooNotTagged is found +``` + +## Given Instance Initialization + +A given instance without type or context parameters is initialized on-demand, the first +time it is accessed. If a given has type or context parameters, a fresh instance +is created for each reference. + +## Syntax + +Here is the syntax for given instances: + +``` +TmplDef ::= ... + | ‘given’ GivenDef +GivenDef ::= [GivenSig] StructuralInstance + | [GivenSig] AnnotType ‘=’ Expr + | [GivenSig] AnnotType +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} ‘with’ TemplateBody +``` + +A given instance starts with the reserved word `given` and an optional _signature_. The signature +defines a name and/or parameters for the instance. It is followed by `:`. 
There are three kinds +of given instances: + +- A _structural instance_ contains one or more types or constructor applications, + followed by `with` and a template body that contains member definitions of the instance. +- An _alias instance_ contains a type, followed by `=` and a right-hand side expression. +- An _abstract instance_ contains just the type, which is not followed by anything. diff --git a/docs/_spec/TODOreference/contextual/multiversal-equality.md b/docs/_spec/TODOreference/contextual/multiversal-equality.md new file mode 100644 index 000000000000..e9a81b95f472 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/multiversal-equality.md @@ -0,0 +1,227 @@ +--- +layout: doc-page +title: "Multiversal Equality" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/multiversal-equality.html +--- + +Previously, Scala had universal equality: Two values of any types +could be compared with each other with `==` and `!=`. This came from +the fact that `==` and `!=` are implemented in terms of Java's +`equals` method, which can also compare values of any two reference +types. + +Universal equality is convenient. But it is also dangerous since it +undermines type safety. For instance, let's assume one is left after some refactoring +with an erroneous program where a value `y` has type `S` instead of the correct type `T`. + +```scala +val x = ... // of type T +val y = ... // of type S, but should be T +x == y // typechecks, will always yield false +``` + +If `y` gets compared to other values of type `T`, +the program will still typecheck, since values of all types can be compared with each other. +But it will probably give unexpected results and fail at runtime. + +Multiversal equality is an opt-in way to make universal equality safer. +It uses a binary type class [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/CanEqual.scala) +to indicate that values of two given types can be compared with each other. 
+The example above would not typecheck if `S` or `T` was a class +that derives `CanEqual`, e.g. + +```scala +class T derives CanEqual +``` + +Alternatively, one can also provide a `CanEqual` given instance directly, like this: + +```scala +given CanEqual[T, T] = CanEqual.derived +``` + +This definition effectively says that values of type `T` can (only) be +compared to other values of type `T` when using `==` or `!=`. The definition +affects type checking but it has no significance for runtime +behavior, since `==` always maps to `equals` and `!=` always maps to +the negation of `equals`. The right-hand side `CanEqual.derived` of the definition +is a value that has any `CanEqual` instance as its type. Here is the definition of class +`CanEqual` and its companion object: + +```scala +package scala +import annotation.implicitNotFound + +@implicitNotFound("Values of types ${L} and ${R} cannot be compared with == or !=") +sealed trait CanEqual[-L, -R] + +object CanEqual: + object derived extends CanEqual[Any, Any] +``` + +One can have several `CanEqual` given instances for a type. For example, the four +definitions below make values of type `A` and type `B` comparable with +each other, but not comparable to anything else: + +```scala +given CanEqual[A, A] = CanEqual.derived +given CanEqual[B, B] = CanEqual.derived +given CanEqual[A, B] = CanEqual.derived +given CanEqual[B, A] = CanEqual.derived +``` + +The [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/CanEqual.scala) +object defines a number of `CanEqual` given instances that together +define a rule book for what standard types can be compared (more details below). + +There is also a "fallback" instance named `canEqualAny` that allows comparisons +over all types that do not themselves have a `CanEqual` given. 
`canEqualAny` is defined as follows: + +```scala +def canEqualAny[L, R]: CanEqual[L, R] = CanEqual.derived +``` + +Even though `canEqualAny` is not declared as `given`, the compiler will still +construct a `canEqualAny` instance as the answer to an implicit search for the +type `CanEqual[L, R]`, unless `L` or `R` have `CanEqual` instances +defined on them, or the language feature `strictEquality` is enabled. + +The primary motivation for having `canEqualAny` is backwards compatibility. +If this is of no concern, one can disable `canEqualAny` by enabling the language +feature `strictEquality`. As for all language features this can be either +done with an import + +```scala +import scala.language.strictEquality +``` +or with a command line option `-language:strictEquality`. + +## Deriving CanEqual Instances + +Instead of defining `CanEqual` instances directly, it is often more convenient to derive them. Example: + +```scala +class Box[T](x: T) derives CanEqual +``` + +By the usual rules of [type class derivation](./derivation.md), +this generates the following `CanEqual` instance in the companion object of `Box`: + +```scala +given [T, U](using CanEqual[T, U]): CanEqual[Box[T], Box[U]] = + CanEqual.derived +``` + +That is, two boxes are comparable with `==` or `!=` if their elements are. Examples: + +```scala +new Box(1) == new Box(1L) // ok since there is an instance for `CanEqual[Int, Long]` +new Box(1) == new Box("a") // error: can't compare +new Box(1) == 1 // error: can't compare +``` + +## Precise Rules for Equality Checking + +The precise rules for equality checking are as follows. + +If the `strictEquality` feature is enabled then +a comparison using `x == y` or `x != y` between values `x: T` and `y: U` +is legal if there is a `given` of type `CanEqual[T, U]`. + +In the default case where the `strictEquality` feature is not enabled the comparison is +also legal if + + 1. `T` and `U` are the same, or + 2. 
one of `T`, `U` is a subtype of the _lifted_ version of the other type, or + 3. neither `T` nor `U` have a _reflexive_ `CanEqual` instance. + +Explanations: + + - _lifting_ a type `S` means replacing all references to abstract types + in covariant positions of `S` by their upper bound, and replacing + all refinement types in covariant positions of `S` by their parent. + - a type `T` has a _reflexive_ `CanEqual` instance if the implicit search for `CanEqual[T, T]` + succeeds. + +## Predefined CanEqual Instances + +The `CanEqual` object defines instances for comparing + - the primitive types `Byte`, `Short`, `Char`, `Int`, `Long`, `Float`, `Double`, `Boolean`, and `Unit`, + - `java.lang.Number`, `java.lang.Boolean`, and `java.lang.Character`, + - `scala.collection.Seq`, and `scala.collection.Set`. + +Instances are defined so that every one of these types has a _reflexive_ `CanEqual` instance, and the following holds: + + - Primitive numeric types can be compared with each other. + - Primitive numeric types can be compared with subtypes of `java.lang.Number` (and _vice versa_). + - `Boolean` can be compared with `java.lang.Boolean` (and _vice versa_). + - `Char` can be compared with `java.lang.Character` (and _vice versa_). + - Two sequences (of arbitrary subtypes of `scala.collection.Seq`) can be compared + with each other if their element types can be compared. The two sequence types + need not be the same. + - Two sets (of arbitrary subtypes of `scala.collection.Set`) can be compared + with each other if their element types can be compared. The two set types + need not be the same. + - Any subtype of `AnyRef` can be compared with `Null` (and _vice versa_). + +## Why Two Type Parameters? + +One particular feature of the `CanEqual` type is that it takes _two_ type parameters, representing the types of the two items to be compared. 
By contrast, conventional +implementations of an equality type class take only a single type parameter which represents the common type of _both_ operands. +One type parameter is simpler than two, so why go through the additional complication? The reason has to do with the fact that, rather than coming up with a type class where no operation existed before, +we are dealing with a refinement of pre-existing, universal equality. It is best illustrated through an example. + +Say you want to come up with a safe version of the `contains` method on `List[T]`. The original definition of `contains` in the standard library was: +```scala +class List[+T]: + ... + def contains(x: Any): Boolean +``` +That uses universal equality in an unsafe way since it permits arguments of any type to be compared with the list's elements. The "obvious" alternative definition +```scala + def contains(x: T): Boolean +``` +does not work, since it refers to the covariant parameter `T` in a nonvariant context. The only variance-correct way to use the type parameter `T` in `contains` is as a lower bound: +```scala + def contains[U >: T](x: U): Boolean +``` +This generic version of `contains` is the one used in the current (Scala 2.13) version of `List`. +It looks different but it admits exactly the same applications as the `contains(x: Any)` definition we started with. +However, we can make it more useful (i.e. restrictive) by adding a `CanEqual` parameter: +```scala + def contains[U >: T](x: U)(using CanEqual[T, U]): Boolean // (1) +``` +This version of `contains` is equality-safe! More precisely, given +`x: T`, `xs: List[T]` and `y: U`, then `xs.contains(y)` is type-correct if and only if +`x == y` is type-correct. + +Unfortunately, the crucial ability to "lift" equality type checking from simple equality and pattern matching to arbitrary user-defined operations gets lost if we restrict ourselves to an equality class with a single type parameter. 
Consider the following signature of `contains` with a hypothetical `CanEqual1[T]` type class: +```scala + def contains[U >: T](x: U)(using CanEqual1[U]): Boolean // (2) +``` +This version could be applied just as widely as the original `contains(x: Any)` method, +since the `CanEqual1[Any]` fallback is always available! So we have gained nothing. What got lost in the transition to a single parameter type class was the original rule that `CanEqual[A, B]` is available only if neither `A` nor `B` have a reflexive `CanEqual` instance. That rule simply cannot be expressed if there is a single type parameter for `CanEqual`. + +The situation is different under `-language:strictEquality`. In that case, +the `CanEqual[Any, Any]` or `CanEqual1[Any]` instances would never be available, and the +single and two-parameter versions would indeed coincide for most practical purposes. + +But assuming `-language:strictEquality` immediately and everywhere poses migration problems which might well be insurmountable. Consider again `contains`, which is in the standard library. Parameterizing it with the `CanEqual` type class as in (1) is an immediate win since it rules out nonsensical applications while still allowing all sensible ones. +So it can be done almost at any time, modulo binary compatibility concerns. +On the other hand, parameterizing `contains` with `CanEqual1` as in (2) would make `contains` +unusable for all types that have not yet declared a `CanEqual1` instance, including all +types coming from Java. This is clearly unacceptable. It would lead to a situation where, +rather than migrating existing libraries to use safe equality, the only upgrade path is to have parallel libraries, with the new version only catering to types deriving `CanEqual1` and the old version dealing with everything else. Such a split of the ecosystem would be very problematic, which means the cure is likely to be worse than the disease. 
+ +For these reasons, it looks like a two-parameter type class is the only way forward because it can take the existing ecosystem where it is and migrate it towards a future where more and more code uses safe equality. + +In applications where `-language:strictEquality` is the default one could also introduce a one-parameter type alias such as +```scala +type Eq[-T] = CanEqual[T, T] +``` +Operations needing safe equality could then use this alias instead of the two-parameter `CanEqual` class. But it would only +work under `-language:strictEquality`, since otherwise the universal `Eq[Any]` instance would be available everywhere. + + +More on multiversal equality is found in a [blog post](http://www.scala-lang.org/blog/2016/05/06/multiversal-equality.html) +and a [GitHub issue](https://github.com/lampepfl/dotty/issues/1247). diff --git a/docs/_spec/TODOreference/contextual/relationship-implicits.md b/docs/_spec/TODOreference/contextual/relationship-implicits.md new file mode 100644 index 000000000000..fce07f51151a --- /dev/null +++ b/docs/_spec/TODOreference/contextual/relationship-implicits.md @@ -0,0 +1,206 @@ +--- +layout: doc-page +title: "Relationship with Scala 2 Implicits" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/relationship-implicits.html +--- + +Many, but not all, of the new contextual abstraction features in Scala 3 can be mapped to Scala 2's implicits. This page gives a rundown on the relationships between new and old features. + +## Simulating Scala 3 Contextual Abstraction Concepts with Scala 2 Implicits + +### Given Instances + +Given instances can be mapped to combinations of implicit objects, classes and implicit methods. + + 1. Given instances without parameters are mapped to implicit objects. For instance, + + ```scala + given intOrd: Ord[Int] with { ... } + ``` + + maps to + + ```scala + implicit object intOrd extends Ord[Int] { ... } + ``` + + 2. 
Parameterized givens are mapped to combinations of classes and implicit methods. For instance, + + ```scala + given listOrd[T](using ord: Ord[T]): Ord[List[T]] with { ... } + ``` + + maps to + + ```scala + class listOrd[T](implicit ord: Ord[T]) extends Ord[List[T]] { ... } + final implicit def listOrd[T](implicit ord: Ord[T]): listOrd[T] = + new listOrd[T] + ``` + + 3. Alias givens map to implicit methods or implicit lazy vals. If an alias has neither type nor context parameters, + it is treated as a lazy val, unless the right-hand side is a simple reference, in which case we can use a forwarder to + that reference without caching it. + +Examples: + +```scala +given global: ExecutionContext = new ForkJoinContext() + +val ctx: Context +given Context = ctx +``` + +would map to + +```scala +final implicit lazy val global: ExecutionContext = new ForkJoinContext() +final implicit def given_Context = ctx +``` + +### Anonymous Given Instances + +Anonymous given instances get compiler synthesized names, which are generated in a reproducible way from the implemented type(s). For example, if the names of the `IntOrd` and `ListOrd` givens above were left out, the following names would be synthesized instead: + +```scala +given given_Ord_Int: Ord[Int] with { ... } +given given_Ord_List[T](using ord: Ord[T]): Ord[List[T]] with { ... } +``` + +The synthesized type names are formed from + +1. the prefix `given_`, +2. the simple name(s) of the implemented type(s), leaving out any prefixes, +3. the simple name(s) of the top-level argument type constructors to these types. + +Tuples are treated as transparent, i.e. a type `F[(X, Y)]` would get the synthesized name +`F_X_Y`. Directly implemented function types `A => B` are represented as `A_to_B`. Function types used as arguments to other type constructors are represented as `Function`. + +### Using Clauses + +Using clauses correspond largely to Scala 2's implicit parameter clauses. E.g. 
+ +```scala +def max[T](x: T, y: T)(using ord: Ord[T]): T +``` + +would be written + +```scala +def max[T](x: T, y: T)(implicit ord: Ord[T]): T +``` + +in Scala 2. The main difference concerns applications of such parameters. +Explicit arguments to parameters of using clauses _must_ be written using `(using ...)`, +mirroring the definition syntax. E.g, `max(2, 3)(using IntOrd)`. +Scala 2 uses normal applications `max(2, 3)(IntOrd)` instead. The Scala 2 syntax has some inherent ambiguities and restrictions which are overcome by the new syntax. For instance, multiple implicit parameter lists are not available in the old syntax, even though they can be simulated using auxiliary objects in the "Aux" pattern. + +The `summon` method corresponds to `implicitly` in Scala 2. +It is precisely the same as the `the` method in [Shapeless](https://github.com/milessabin/shapeless). +The difference between `summon` (or `the`) and `implicitly` is +that `summon` can return a more precise type than the type that was +asked for. + +### Context Bounds + +Context bounds are the same in both language versions. They expand to the respective forms of implicit parameters. + +**Note:** To ease migration, context bounds in Scala 3 map for a limited time to old-style implicit parameters for which arguments can be passed either in a using clause or +in a normal argument list. Once old-style implicits are deprecated, context bounds +will map to using clauses instead. + +### Extension Methods + +Extension methods have no direct counterpart in Scala 2, but they can be simulated with implicit classes. 
For instance, the extension method + +```scala +extension (c: Circle) + def circumference: Double = c.radius * math.Pi * 2 +``` + +could be simulated to some degree by + +```scala +implicit class CircleDecorator(c: Circle) extends AnyVal { + def circumference: Double = c.radius * math.Pi * 2 +} +``` + +Abstract extension methods in traits that are implemented in given instances have no direct counterpart in Scala 2. The only way to simulate these is to make implicit classes available through imports. The Simulacrum macro library can automate this process in some cases. + +### Type Class Derivation + +Type class derivation has no direct counterpart in the Scala 2 language. Comparable functionality can be achieved by macro-based libraries such as [Shapeless](https://github.com/milessabin/shapeless), [Magnolia](https://propensive.com/opensource/magnolia), or [scalaz-deriving](https://github.com/scalaz/scalaz-deriving). + +### Context Function Types + +Context function types have no analogue in Scala 2. + +### Implicit By-Name Parameters + +Implicit by-name parameters are not supported in Scala 2, but can be emulated to some degree by the `Lazy` type in Shapeless. + +## Simulating Scala 2 Implicits in Scala 3 + +### Implicit Conversions + +Implicit conversion methods in Scala 2 can be expressed as given instances of the `scala.Conversion` class in Scala 3. For instance, instead of + +```scala +implicit def stringToToken(str: String): Token = new Keyword(str) +``` + +one can write + +```scala +given stringToToken: Conversion[String, Token] with + def apply(str: String): Token = KeyWord(str) +``` + +or + +```scala +given stringToToken: Conversion[String, Token] = KeyWord(_) +``` + +### Implicit Classes + +Implicit classes in Scala 2 are often used to define extension methods, which are directly supported in Scala 3. Other uses of implicit classes can be simulated by a pair of a regular class and a given `Conversion` instance. 
+ +### Implicit Values + +Implicit `val` definitions in Scala 2 can be expressed in Scala 3 using a regular `val` definition and an alias given. +For instance, Scala 2's + +```scala +lazy implicit val pos: Position = tree.sourcePos +``` + +can be expressed in Scala 3 as + +```scala +lazy val pos: Position = tree.sourcePos +given Position = pos +``` + +### Abstract Implicits + +An abstract implicit `val` or `def` in Scala 2 can be expressed in Scala 3 using a regular abstract definition and an alias given. For instance, Scala 2's + +```scala +implicit def symDecorator: SymDecorator +``` + +can be expressed in Scala 3 as + +```scala +def symDecorator: SymDecorator +given SymDecorator = symDecorator +``` + +## Implementation Status and Timeline + +The Scala 3 implementation implements both Scala 2's implicits and the new abstractions. In fact, support for Scala 2's implicits is an essential part of the common language subset between 2.13 and Scala 3. +Migration to the new abstractions will be supported by making automatic rewritings available. + +Depending on adoption patterns, old style implicits might start to be deprecated in a version following Scala 3.0. 
diff --git a/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md b/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md new file mode 100644 index 000000000000..068123df8cd2 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md @@ -0,0 +1,52 @@ +--- +layout: doc-page +title: "Right-Associative Extension Methods: Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/right-associative-extension-methods.html +--- + +The most general form of leading parameters of an extension method is as follows: + + - A possibly empty list of using clauses `leadingUsing` + - A single parameter `extensionParam` + - A possibly empty list of using clauses `trailingUsing` + +This is then followed by `def`, the method name, and possibly further parameters +`otherParams`. An example is: + +```scala + extension (using a: A, b: B)(using c: C) // <-- leadingUsing + (x: X) // <-- extensionParam + (using d: D) // <-- trailingUsing + def +:: (y: Y)(using e: E)(z: Z) // <-- otherParams +``` + +An extension method is treated as a right-associative operator +(as in [SLS §6.12.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#infix-operations)) +if it has a name ending in `:` and is immediately followed by a +single parameter. In the example above, that parameter is `(y: Y)`. + +The Scala compiler pre-processes a right-associative infix operation such as `x +: xs` +to `xs.+:(x)` if `x` is a pure expression or a call-by-name parameter and to `val y = x; xs.+:(y)` otherwise. This is necessary since a regular right-associative infix method +is defined in the class of its right operand. To make up for this swap, +the expansion of right-associative extension methods performs an analogous parameter swap. 
More precisely, if `otherParams` consists of a single parameter +`rightParam` followed by `remaining`, the total parameter sequence +of the extension method's expansion is: + +``` + leadingUsing rightParam trailingUsing extensionParam remaining +``` + +For instance, the `+::` method above would become + +```scala + def +:: (using a: A, b: B)(using c: C) + (y: Y) + (using d: D) + (x: X) + (using e: E)(z: Z) +``` + +This expansion has to be kept in mind when writing right-associative extension +methods with inter-parameter dependencies. + +An overall simpler design could be obtained if right-associative operators could _only_ be defined as extension methods, and would be disallowed as normal methods. In that case neither arguments nor parameters would have to be swapped. Future versions of Scala should strive to achieve this simplification. diff --git a/docs/_spec/TODOreference/contextual/type-classes.md b/docs/_spec/TODOreference/contextual/type-classes.md new file mode 100644 index 000000000000..9fc0d2eec864 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/type-classes.md @@ -0,0 +1,282 @@ +--- +layout: doc-page +title: "Implementing Type classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/type-classes.html +--- + +A _type class_ is an abstract, parameterized type that lets you add new behavior to any closed data type without using sub-typing. This can be useful in multiple use-cases, for example: + +* expressing how a type you don't own (from the standard or 3rd-party library) conforms to such behavior +* expressing such a behavior for multiple types without involving sub-typing relationships (one `extends` another) between those types (see: [ad hoc polymorphism](https://en.wikipedia.org/wiki/Ad_hoc_polymorphism) for instance) + +Therefore in Scala 3, _type classes_ are just _traits_ with one or more parameters whose implementations are not defined through the `extends` keyword, but by **given instances**. 
+Here are some examples of common type classes: + +## Semigroups and monoids + +Here's the `Monoid` type class definition: + +```scala +trait SemiGroup[T]: + extension (x: T) def combine (y: T): T + +trait Monoid[T] extends SemiGroup[T]: + def unit: T +``` + +An implementation of this `Monoid` type class for the type `String` can be the following: + +```scala +given Monoid[String] with + extension (x: String) def combine (y: String): String = x.concat(y) + def unit: String = "" +``` + +Whereas for the type `Int` one could write the following: + +```scala +given Monoid[Int] with + extension (x: Int) def combine (y: Int): Int = x + y + def unit: Int = 0 +``` + +This monoid can now be used as _context bound_ in the following `combineAll` method: + +```scala +def combineAll[T: Monoid](xs: List[T]): T = + xs.foldLeft(summon[Monoid[T]].unit)(_.combine(_)) +``` + +To get rid of the `summon[...]` we can define a `Monoid` object as follows: + +```scala +object Monoid: + def apply[T](using m: Monoid[T]) = m +``` + +Which would allow to re-write the `combineAll` method this way: + +```scala +def combineAll[T: Monoid](xs: List[T]): T = + xs.foldLeft(Monoid[T].unit)(_.combine(_)) +``` + +## Functors + +A `Functor` for a type provides the ability for its values to be "mapped over", i.e. apply a function that transforms inside a value while remembering its shape. For example, to modify every element of a collection without dropping or adding elements. +We can represent all types that can be "mapped over" with `F`. It's a type constructor: the type of its values becomes concrete when provided a type argument. +Therefore we write it `F[_]`, hinting that the type `F` takes another type as argument. 
+The definition of a generic `Functor` would thus be written as: + +```scala +trait Functor[F[_]]: + def map[A, B](x: F[A], f: A => B): F[B] +``` + +Which could read as follows: "A `Functor` for the type constructor `F[_]` represents the ability to transform `F[A]` to `F[B]` through the application of function `f` with type `A => B`". We call the `Functor` definition here a _type class_. +This way, we could define an instance of `Functor` for the `List` type: + +```scala +given Functor[List] with + def map[A, B](x: List[A], f: A => B): List[B] = + x.map(f) // List already has a `map` method +``` + +With this `given` instance in scope, everywhere a `Functor` is expected, the compiler will accept a `List` to be used. + +For instance, we may write such a testing method: + +```scala +def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = + assert(expected == summon[Functor[F]].map(original, mapping)) +``` + +And use it this way, for example: + +```scala +assertTransformation(List("a1", "b1"), List("a", "b"), elt => s"${elt}1") +``` + +That's a first step, but in practice we probably would like the `map` function to be a method directly accessible on the type `F`. So that we can call `map` directly on instances of `F`, and get rid of the `summon[Functor[F]]` part. +As in the previous example of Monoids, [`extension` methods](extension-methods.md) help achieving that. Let's re-define the `Functor` type class with extension methods. 
+ +```scala +trait Functor[F[_]]: + extension [A](x: F[A]) + def map[B](f: A => B): F[B] +``` + +The instance of `Functor` for `List` now becomes: + +```scala +given Functor[List] with + extension [A](xs: List[A]) + def map[B](f: A => B): List[B] = + xs.map(f) // List already has a `map` method + +``` + +It simplifies the `assertTransformation` method: + +```scala +def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = + assert(expected == original.map(mapping)) +``` + +The `map` method is now directly used on `original`. It is available as an extension method +since `original`'s type is `F[A]` and a given instance for `Functor[F[A]]` which defines `map` +is in scope. + +## Monads + +Applying `map` in `Functor[List]` to a mapping function of type `A => B` results in a `List[B]`. So applying it to a mapping function of type `A => List[B]` results in a `List[List[B]]`. To avoid managing lists of lists, we may want to "flatten" the values in a single list. + +That's where `Monad` comes in. A `Monad` for type `F[_]` is a `Functor[F]` with two more operations: + +* `flatMap`, which turns an `F[A]` into an `F[B]` when given a function of type `A => F[B]`, +* `pure`, which creates an `F[A]` from a single value `A`. 
+ +Here is the translation of this definition in Scala 3: + +```scala +trait Monad[F[_]] extends Functor[F]: + + /** The unit value for a monad */ + def pure[A](x: A): F[A] + + extension [A](x: F[A]) + /** The fundamental composition operation */ + def flatMap[B](f: A => F[B]): F[B] + + /** The `map` operation can now be defined in terms of `flatMap` */ + def map[B](f: A => B) = x.flatMap(f.andThen(pure)) + +end Monad +``` + +### List + +A `List` can be turned into a monad via this `given` instance: + +```scala +given listMonad: Monad[List] with + def pure[A](x: A): List[A] = + List(x) + extension [A](xs: List[A]) + def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) // rely on the existing `flatMap` method of `List` +``` + +Since `Monad` is a subtype of `Functor`, `List` is also a functor. The Functor's `map` +operation is already provided by the `Monad` trait, so the instance does not need to define +it explicitly. + +### Option + +`Option` is another type having the same kind of behaviour: + +```scala +given optionMonad: Monad[Option] with + def pure[A](x: A): Option[A] = + Option(x) + extension [A](xo: Option[A]) + def flatMap[B](f: A => Option[B]): Option[B] = xo match + case Some(x) => f(x) + case None => None +``` + +### Reader + +Another example of a `Monad` is the _Reader_ Monad, which acts on functions instead of +data types like `List` or `Option`. It can be used to combine multiple functions +that all need the same parameter. For instance multiple functions needing access to some configuration, context, environment variables, etc. + +Let's define a `Config` type, and two functions using it: + +```scala +trait Config +// ... +def compute(i: Int)(config: Config): String = ??? +def show(str: String)(config: Config): Unit = ??? 
+ +``` + +We may want to combine `compute` and `show` into a single function, accepting a `Config` as parameter, and showing the result of the computation, and we'd like to use +a monad to avoid passing the parameter explicitly multiple times. So postulating +the right `flatMap` operation, we could write: + +```scala +def computeAndShow(i: Int): Config => Unit = compute(i).flatMap(show) +``` + +instead of + +```scala +show(compute(i)(config))(config) +``` + +Let's define this monad then. First, we are going to define a type named `ConfigDependent` representing a function that when passed a `Config` produces a `Result`. + +```scala +type ConfigDependent[Result] = Config => Result +``` + +The monad instance will look like this: + +```scala +given configDependentMonad: Monad[ConfigDependent] with + + def pure[A](x: A): ConfigDependent[A] = + config => x + + extension [A](x: ConfigDependent[A]) + def flatMap[B](f: A => ConfigDependent[B]): ConfigDependent[B] = + config => f(x(config))(config) + +end configDependentMonad +``` + +The type `ConfigDependent` can be written using [type lambdas](../new-types/type-lambdas.md): + +```scala +type ConfigDependent = [Result] =>> Config => Result +``` + +Using this syntax would turn the previous `configDependentMonad` into: + +```scala +given configDependentMonad: Monad[[Result] =>> Config => Result] with + + def pure[A](x: A): Config => A = + config => x + + extension [A](x: Config => A) + def flatMap[B](f: A => Config => B): Config => B = + config => f(x(config))(config) + +end configDependentMonad +``` + +It is likely that we would like to use this pattern with other kinds of environments than our `Config` trait. 
The Reader monad allows us to abstract away `Config` as a type _parameter_, named `Ctx` in the following definition: + +```scala +given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with + + def pure[A](x: A): Ctx => A = + ctx => x + + extension [A](x: Ctx => A) + def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(x(ctx))(ctx) + +end readerMonad +``` + +## Summary + +The definition of a _type class_ is expressed with a parameterised type with abstract members, such as a `trait`. +The main difference between subtype polymorphism and ad-hoc polymorphism with _type classes_ is how the definition of the _type class_ is implemented, in relation to the type it acts upon. +In the case of a _type class_, its implementation for a concrete type is expressed through a `given` instance definition, which is supplied as an implicit argument alongside the value it acts upon. With subtype polymorphism, the implementation is mixed into the parents of a class, and only a single term is required to perform a polymorphic operation. The type class solution +takes more effort to set up, but is more extensible: Adding a new interface to a +class requires changing the source code of that class. By contrast, instances for type classes can be defined anywhere. + +To conclude, we have seen that traits and given instances, combined with other constructs like extension methods, context bounds and type lambdas allow a concise and natural expression of _type classes_. diff --git a/docs/_spec/TODOreference/contextual/using-clauses.md b/docs/_spec/TODOreference/contextual/using-clauses.md new file mode 100644 index 000000000000..9187e1916e7d --- /dev/null +++ b/docs/_spec/TODOreference/contextual/using-clauses.md @@ -0,0 +1,153 @@ +--- +layout: doc-page +title: "Using Clauses" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/using-clauses.html +--- + +Functional programming tends to express most dependencies as simple function parameterization. 
+This is clean and powerful, but it sometimes leads to functions that take many parameters where the same value is passed over and over again in long call chains to many +functions. Context parameters can help here since they enable the compiler to synthesize +repetitive arguments instead of the programmer having to write them explicitly. + +For example, with the [given instances](./givens.md) defined previously, +a `max` function that works for any arguments for which an ordering exists can be defined as follows: + +```scala +def max[T](x: T, y: T)(using ord: Ord[T]): T = + if ord.compare(x, y) < 0 then y else x +``` + +Here, `ord` is a _context parameter_ introduced with a `using` clause. +The `max` function can be applied as follows: + +```scala +max(2, 3)(using intOrd) +``` + +The `(using intOrd)` part passes `intOrd` as an argument for the `ord` parameter. But the point of context parameters is that this argument can also be left out (and it usually is). So the following applications are equally valid: + +```scala +max(2, 3) +max(List(1, 2, 3), Nil) +``` + +## Anonymous Context Parameters + +In many situations, the name of a context parameter need not be +mentioned explicitly at all, since it is used only in synthesized arguments for +other context parameters. In that case one can avoid defining a parameter name +and just provide its type. Example: + +```scala +def maximum[T](xs: List[T])(using Ord[T]): T = + xs.reduceLeft(max) +``` + +`maximum` takes a context parameter of type `Ord[T]` only to pass it on as an +inferred argument to `max`. The name of the parameter is left out. + +Generally, context parameters may be defined either as a full parameter list `(p_1: T_1, ..., p_n: T_n)` or just as a sequence of types `T_1, ..., T_n`. Vararg parameters are not supported in `using` clauses. + +## Class Context Parameters + +If a class context parameter is made a member by adding a `val` or `var` modifier, +then that member is available as a given instance. 
+ +Compare the following examples, where the attempt to supply an explicit `given` member induces an ambiguity: + +```scala +class GivenIntBox(using val givenInt: Int): + def n = summon[Int] + +class GivenIntBox2(using givenInt: Int): + given Int = givenInt + //def n = summon[Int] // ambiguous +``` + +The `given` member is importable as explained in the section on [importing `given`s](./given-imports.md): + +```scala +val b = GivenIntBox(using 23) +import b.given +summon[Int] // 23 + +import b.* +//givenInt // Not found +``` + +## Inferring Complex Arguments + +Here are two other methods that have a context parameter of type `Ord[T]`: + +```scala +def descending[T](using asc: Ord[T]): Ord[T] = new Ord[T]: + def compare(x: T, y: T) = asc.compare(y, x) + +def minimum[T](xs: List[T])(using Ord[T]) = + maximum(xs)(using descending) +``` + +The `minimum` method's right-hand side passes `descending` as an explicit argument to `maximum(xs)`. +With this setup, the following calls are all well-formed, and they all normalize to the last one: + +```scala +minimum(xs) +maximum(xs)(using descending) +maximum(xs)(using descending(using listOrd)) +maximum(xs)(using descending(using listOrd(using intOrd))) +``` + +## Multiple `using` Clauses + +There can be several `using` clauses in a definition and `using` clauses can be freely mixed with normal parameter clauses. Example: + +```scala +def f(u: Universe)(using ctx: u.Context)(using s: ctx.Symbol, k: ctx.Kind) = ... +``` + +Multiple `using` clauses are matched left-to-right in applications. Example: + +```scala +object global extends Universe { type Context = ... } +given ctx : global.Context with { type Symbol = ...; type Kind = ... } +given sym : ctx.Symbol +given kind: ctx.Kind + +``` +Then the following calls are all valid (and normalize to the last one) + +```scala +f(global) +f(global)(using ctx) +f(global)(using ctx)(using sym, kind) +``` + +But `f(global)(using sym, kind)` would give a type error. 
+ + +## Summoning Instances + +The method `summon` in `Predef` returns the given of a specific type. For example, +the given instance for `Ord[List[Int]]` is produced by + +```scala +summon[Ord[List[Int]]] // reduces to listOrd(using intOrd) +``` + +The `summon` method is simply defined as the (non-widening) identity function over a context parameter. + +```scala +def summon[T](using x: T): x.type = x +``` + +## Syntax + +Here is the new syntax of parameters and arguments seen as a delta from the [standard context free syntax of Scala 3](../syntax.md). `using` is a soft keyword, recognized only at the start of a parameter or argument list. It can be used as a normal identifier everywhere else. + +``` +ClsParamClause ::= ... | UsingClsParamClause +DefParamClauses ::= ... | UsingParamClause +UsingClsParamClause ::= ‘(’ ‘using’ (ClsParams | Types) ‘)’ +UsingParamClause ::= ‘(’ ‘using’ (DefParams | Types) ‘)’ +ParArgumentExprs ::= ... | ‘(’ ‘using’ ExprsInParens ‘)’ +``` diff --git a/docs/_spec/TODOreference/dropped-features/auto-apply.md b/docs/_spec/TODOreference/dropped-features/auto-apply.md new file mode 100644 index 000000000000..eadfe2f429ea --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/auto-apply.md @@ -0,0 +1,96 @@ +--- +layout: doc-page +title: "Dropped: Auto-Application" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/auto-apply.html +--- + +Previously an empty argument list `()` was implicitly inserted when +calling a nullary method without arguments. Example: + +```scala +def next(): T = ... +next // is expanded to next() +``` + +In Scala 3, this idiom is an error. + +```scala +next +^ +missing arguments for method next +``` + +In Scala 3, the application syntax has to follow exactly the parameter +syntax. Excluded from this rule are methods that are defined in Java +or that override methods defined in Java. 
The reason for being more +lenient with such methods is that otherwise everyone would have to +write + +```scala +xs.toString().length() +``` + +instead of + +```scala +xs.toString.length +``` + +The latter is idiomatic Scala because it conforms to the _uniform +access principle_. This principle states that one should be able to +change an object member from a field to a non-side-effecting method +and back without affecting clients that access the +member. Consequently, Scala encourages to define such "property" +methods without a `()` parameter list whereas side-effecting methods +should be defined with it. Methods defined in Java cannot make this +distinction; for them a `()` is always mandatory. So Scala fixes the +problem on the client side, by allowing the parameterless references. +But where Scala allows that freedom for all method references, Scala 3 +restricts it to references of external methods that are not defined +themselves in Scala 3. + +For reasons of backwards compatibility, Scala 3 for the moment also +auto-inserts `()` for nullary methods that are defined in Scala 2, or +that override a method defined in Scala 2. It turns out that, because +the correspondence between definition and call was not enforced in +Scala so far, there are quite a few method definitions in Scala 2 +libraries that use `()` in an inconsistent way. For instance, we +find in `scala.math.Numeric` + +```scala +def toInt(): Int +``` + +whereas `toInt` is written without parameters everywhere +else. Enforcing strict parameter correspondence for references to +such methods would project the inconsistencies to client code, which +is undesirable. So Scala 3 opts for more leniency when type-checking +references to such methods until most core libraries in Scala 2 have +been cleaned up. + +Stricter conformance rules also apply to overriding of nullary +methods. It is no longer allowed to override a parameterless method +by a nullary method or _vice versa_. 
Instead, both methods must agree +exactly in their parameter lists. + +```scala +class A: + def next(): Int + +class B extends A: + def next: Int // overriding error: incompatible type +``` + +Methods overriding Java or Scala 2 methods are again exempted from this +requirement. + +## Migrating code + +Existing Scala code with inconsistent parameters can still be compiled +in Scala 3 under `-source 3.0-migration`. When paired with the `-rewrite` +option, the code will be automatically rewritten to conform to Scala 3's +stricter checking. + +## Reference + +For more information, see [Issue #2570](https://github.com/lampepfl/dotty/issues/2570) and [PR #2716](https://github.com/lampepfl/dotty/pull/2716). diff --git a/docs/_spec/TODOreference/dropped-features/delayed-init.md b/docs/_spec/TODOreference/dropped-features/delayed-init.md new file mode 100644 index 000000000000..5d4f614ce951 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/delayed-init.md @@ -0,0 +1,32 @@ +--- +layout: doc-page +title: "Dropped: DelayedInit" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/delayed-init.html +--- + +The special handling of the [`DelayedInit`](https://scala-lang.org/api/3.x/scala/DelayedInit.html) +trait is no longer supported. + +One consequence is that the [`App`](https://scala-lang.org/api/3.x/scala/App.html) class, +which used [`DelayedInit`](https://scala-lang.org/api/3.x/scala/DelayedInit.html) is +now partially broken. You can still use `App` as a simple way to set up a main program. Example: + +```scala +object HelloWorld extends App { + println("Hello, world!") +} +``` + +However, the code is now run in the initializer of the object, which on +some JVM's means that it will only be interpreted. So, better not use it +for benchmarking! Also, if you want to access the command line arguments, +you need to use an explicit `main` method for that. 
+ +```scala +object Hello: + def main(args: Array[String]) = + println(s"Hello, ${args(0)}") +``` + +On the other hand, Scala 3 offers a convenient alternative to such "program" objects +with [`@main` methods](../changed-features/main-functions.md). diff --git a/docs/_spec/TODOreference/dropped-features/do-while.md b/docs/_spec/TODOreference/dropped-features/do-while.md new file mode 100644 index 000000000000..08a730b8b5a7 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/do-while.md @@ -0,0 +1,41 @@ +--- +layout: doc-page +title: "Dropped: Do-While" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/do-while.html +--- + +The syntax construct +```scala +do while +``` +is no longer supported. Instead, it is recommended to use the equivalent `while` loop +below: +```scala +while ({ ; }) () +``` +For instance, instead of +```scala +do + i += 1 +while (f(i) == 0) +``` +one writes +```scala +while + i += 1 + f(i) == 0 +do () +``` +The idea to use a block as the condition of a while also gives a solution +to the "loop-and-a-half" problem. Here is another example: +```scala +while + val x: Int = iterator.next + x >= 0 +do print(".") +``` + +## Why Drop The Construct? + + - `do-while` is used relatively rarely and it can be expressed faithfully using just `while`. So there seems to be little point in having it as a separate syntax construct. + - Under the [new syntax rules](../other-new-features/control-syntax.md) `do` is used as a statement continuation, which would clash with its meaning as a statement introduction. 
diff --git a/docs/_spec/TODOreference/dropped-features/dropped-features.md b/docs/_spec/TODOreference/dropped-features/dropped-features.md new file mode 100644 index 000000000000..f6a13d9fa5da --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/dropped-features.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Dropped Features" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features.html +--- + +The following pages document the features of Scala 2 that have been dropped in Scala 3. diff --git a/docs/_spec/TODOreference/dropped-features/early-initializers.md b/docs/_spec/TODOreference/dropped-features/early-initializers.md new file mode 100644 index 000000000000..6f7c59c4f031 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/early-initializers.md @@ -0,0 +1,16 @@ +--- +layout: doc-page +title: "Dropped: Early Initializers" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/early-initializers.html +--- + +Early initializers of the form + +```scala +class C extends { ... } with SuperClass ... +``` + +have been dropped. They were rarely used, and mostly to compensate for the lack of +[trait parameters](../other-new-features/trait-parameters.md), which are now directly supported in Scala 3. + +For more information, see [SLS §5.1.6](https://www.scala-lang.org/files/archive/spec/2.13/05-classes-and-objects.html#early-definitions). 
diff --git a/docs/_spec/TODOreference/dropped-features/existential-types.md b/docs/_spec/TODOreference/dropped-features/existential-types.md new file mode 100644 index 000000000000..6ef815152cd0 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/existential-types.md @@ -0,0 +1,35 @@ +--- +layout: doc-page +title: "Dropped: Existential Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/existential-types.html +--- + +Existential types using `forSome` (as in +[SLS §3.2.12](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#existential-types)) +have been dropped. The reasons for dropping them are: + + - Existential types violate a type soundness principle on which DOT + and Scala 3 are constructed. That principle says that every + prefix (`p`, respectvely `S`) of a type selection `p.T` or `S#T` + must either come from a value constructed at runtime or refer to a + type that is known to have only good bounds. + + - Existential types create many difficult feature interactions + with other Scala constructs. + + - Existential types largely overlap with path-dependent types, + so the gain of having them is relatively minor. + +Existential types that can be expressed using only wildcards (but not +`forSome`) are still supported, but are treated as refined types. +For instance, the type +```scala +Map[_ <: AnyRef, Int] +``` +is treated as the type `Map`, where the first type parameter +is upper-bounded by `AnyRef` and the second type parameter is an alias +of `Int`. + +When reading class files compiled with Scala 2, Scala 3 will do a best +effort to approximate existential types with its own types. It will +issue a warning that a precise emulation is not possible. 
diff --git a/docs/_spec/TODOreference/dropped-features/limit22.md b/docs/_spec/TODOreference/dropped-features/limit22.md new file mode 100644 index 000000000000..e72aeadbe2ca --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/limit22.md @@ -0,0 +1,17 @@ +--- +layout: doc-page +title: "Dropped: Limit 22" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/limit22.html +--- + +The limits of 22 for the maximal number of parameters of function types and the +maximal number of fields in tuple types have been dropped. + +* Functions can now have an arbitrary number of parameters. Functions beyond + [`scala.Function22`](https://www.scala-lang.org/api/current/scala/Function22.html) are erased to a new trait [`scala.runtime.FunctionXXL`](https://scala-lang.org/api/3.x/scala/runtime/FunctionXXL.html). + +* Tuples can also have an arbitrary number of fields. Tuples beyond [`scala.Tuple22`](https://www.scala-lang.org/api/current/scala/Tuple22.html) + are erased to a new class [`scala.runtime.TupleXXL`](https://scala-lang.org/api/3.x/scala/runtime/TupleXXL.html) (which extends the trait [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.md)). Furthermore, they support generic + operation such as concatenation and indexing. + +Both of these are implemented using arrays. diff --git a/docs/_spec/TODOreference/dropped-features/macros.md b/docs/_spec/TODOreference/dropped-features/macros.md new file mode 100644 index 000000000000..7ffe9043d0cd --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/macros.md @@ -0,0 +1,16 @@ +--- +layout: doc-page +title: "Dropped: Scala 2 Macros" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html +--- + +The previous, experimental macro system has been dropped. + +Instead, there is a cleaner, more restricted system based on two complementary concepts: `inline` and `'{ ... }`/`${ ... }` code generation. +`'{ ... 
}` delays the compilation of the code and produces an object containing the code, dually `${ ... }` evaluates an expression which produces code and inserts it in the surrounding `${ ... }`. +In this setting, a definition marked as inlined containing a `${ ... }` is a macro, the code inside the `${ ... }` is executed at compile-time and produces code in the form of `'{ ... }`. +Additionally, the contents of code can be inspected and created with a more complex reflection API as an extension of `'{ ... }`/`${ ... }` framework. + +* `inline` has been [implemented](../metaprogramming/inline.md) in Scala 3. +* Quotes `'{ ... }` and splices `${ ... }` has been [implemented](../metaprogramming/macros.md) in Scala 3. +* [TASTy reflect](../metaprogramming/reflection.md) provides more complex tree based APIs to inspect or create quoted code. diff --git a/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md b/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md new file mode 100644 index 000000000000..17b86f77ee56 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md @@ -0,0 +1,29 @@ +--- +layout: doc-page +title: "Deprecated: Nonlocal Returns" + +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html +--- + +Returning from nested anonymous functions has been deprecated, and will produce a warning from version `3.2`. + +Nonlocal returns are implemented by throwing and catching `scala.runtime.NonLocalReturnException`-s. This is rarely what is intended by the programmer. It can be problematic because of the hidden performance cost of throwing and catching exceptions. Furthermore, it is a leaky implementation: a catch-all exception handler can intercept a `NonLocalReturnException`. + +A drop-in library replacement is provided in [`scala.util.control.NonLocalReturns`](https://scala-lang.org/api/3.x/scala/util/control/NonLocalReturns$.html). 
Example: + +```scala +import scala.util.control.NonLocalReturns.* + +extension [T](xs: List[T]) + def has(elem: T): Boolean = returning { + for x <- xs do + if x == elem then throwReturn(true) + false + } + +@main def test(): Unit = + val xs = List(1, 2, 3, 4, 5) + assert(xs.has(2) == xs.contains(2)) +``` + +Note: compiler produces deprecation error on nonlocal returns only with `-source:future` option. diff --git a/docs/_spec/TODOreference/dropped-features/package-objects.md b/docs/_spec/TODOreference/dropped-features/package-objects.md new file mode 100644 index 000000000000..d8149e460bf5 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/package-objects.md @@ -0,0 +1,48 @@ +--- +layout: doc-page +title: "Dropped: Package Objects" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/package-objects.html +--- + +Package objects +```scala +package object p { + val a = ... + def b = ... +} +``` +will be dropped. They are still available in Scala 3.0 and 3.1, but will be deprecated and removed afterwards. + +Package objects are no longer needed since all kinds of definitions can now be written at the top-level. Example: +```scala +package p +type Labelled[T] = (String, T) +val a: Labelled[Int] = ("count", 1) +def b = a._2 + +case class C() + +extension (x: C) def pair(y: C) = (x, y) +``` +There may be several source files in a package containing such top-level definitions, and source files can freely mix top-level value, method, and type definitions with classes and objects. + +The compiler generates synthetic objects that wrap top-level definitions falling into one of the following categories: + + - all pattern, value, method, and type definitions, + - implicit classes and objects, + - companion objects of opaque type aliases. + +If a source file `src.scala` contains such top-level definitions, they will be put in a synthetic object named `src$package`. The wrapping is transparent, however. 
The definitions in `src` can still be accessed as members of the enclosing package. The synthetic object will be placed last in the file, +after any other package clauses, imports, or object and class definitions. + +**Note:** This means that +1. The name of a source file containing wrapped top-level definitions is relevant for binary compatibility. If the name changes, so does the name of the generated object and its class. + +2. A top-level main method `def main(args: Array[String]): Unit = ...` is wrapped as any other method. If it appears +in a source file `src.scala`, it could be invoked from the command line using a command like `scala src$package`. Since the +"program name" is mangled it is recommended to always put `main` methods in explicitly named objects. + +3. The notion of `private` is independent of whether a definition is wrapped or not. A `private` top-level definition is always visible from everywhere in the enclosing package. + +4. If several top-level definitions are overloaded variants with the same name, +they must all come from the same source file. diff --git a/docs/_spec/TODOreference/dropped-features/procedure-syntax.md b/docs/_spec/TODOreference/dropped-features/procedure-syntax.md new file mode 100644 index 000000000000..de76fbb32af2 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/procedure-syntax.md @@ -0,0 +1,19 @@ +--- +layout: doc-page +title: "Dropped: Procedure Syntax" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/procedure-syntax.html +--- + +Procedure syntax +```scala +def f() { ... } +``` +has been dropped. You need to write one of the following instead: +```scala +def f() = { ... } +def f(): Unit = { ... } +``` +Scala 3 accepts the old syntax under the `-source:3.0-migration` option. +If the `-migration` option is set, it can even rewrite old syntax to new. +The [Scalafix](https://scalacenter.github.io/scalafix/) tool also +can rewrite procedure syntax to make it Scala 3 compatible. 
diff --git a/docs/_spec/TODOreference/dropped-features/symlits.md b/docs/_spec/TODOreference/dropped-features/symlits.md new file mode 100644 index 000000000000..d3c0180b16e6 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/symlits.md @@ -0,0 +1,24 @@ +--- +layout: doc-page +title: "Dropped: Symbol Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/symlits.html +--- + +Symbol literals are no longer supported. + +The [`scala.Symbol`](https://scala-lang.org/api/3.x/scala/Symbol.html) class still exists, so a literal translation of the symbol literal `'xyz` is `Symbol("xyz")`. However, it is recommended to use a plain string literal `"xyz"` instead. (The `Symbol` class will be deprecated and removed in the future). Example: + + +``` +scalac Test.scala +-- Error: Test.scala:1:25 ------------------------------------------------------------------------------------------------ + +1 |@main def test = println('abc) + | ^ + | symbol literal 'abc is no longer supported, + | use a string literal "abc" or an application Symbol("abc") instead, + | or enclose in braces '{abc} if you want a quoted expression. + | For now, you can also `import language.deprecated.symbolLiterals` to accept + | the idiom, but this possibility might no longer be available in the future. +1 error found +``` diff --git a/docs/_spec/TODOreference/dropped-features/this-qualifier.md b/docs/_spec/TODOreference/dropped-features/this-qualifier.md new file mode 100644 index 000000000000..4fcadff8fae3 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/this-qualifier.md @@ -0,0 +1,33 @@ +--- +layout: doc-page +title: "Dropped: private[this] and protected[this]" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +--- + +The `private[this]` and `protected[this]` access modifiers are deprecated and will be phased out. 
+ +Previously, these modifiers were needed for + + - avoiding the generation of getters and setters + - excluding code under a `private[this]` from variance checks. (Scala 2 also excludes `protected[this]` but this was found to be unsound and was therefore removed). + - avoiding the generation of fields, if a `private[this] val` is not accessed + by a class method. + +The compiler now infers for `private` members the fact that they are only accessed via `this`. Such members are treated as if they had been declared `private[this]`. `protected[this]` is dropped without a replacement. + +This change can in some cases change the semantics of a Scala program, since a +`private` val is no longer guaranteed to generate a field. The field +is omitted if + + - the `val` is only accessed via `this`, and + - the `val` is not accessed from a method in the current class. + +This can cause problems if a program tries to access the missing private field via reflection. The recommended fix is to declare the field instead to be qualified private with the enclosing class as qualifier. Example: +```scala + class C(x: Int): + private[C] val field = x + 1 + // [C] needed if `field` is to be accessed through reflection + val retained = field * field +``` + + diff --git a/docs/_spec/TODOreference/dropped-features/type-projection.md b/docs/_spec/TODOreference/dropped-features/type-projection.md new file mode 100644 index 000000000000..08b5ffb34eca --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/type-projection.md @@ -0,0 +1,18 @@ +--- +layout: doc-page +title: "Dropped: General Type Projection" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/type-projection.html +--- + +Scala so far allowed general type projection `T#A` where `T` is an arbitrary type +and `A` names a type member of `T`. + +Scala 3 disallows this if `T` is an abstract type (class types and type aliases +are fine). 
This change was made because unrestricted type projection +is [unsound](https://github.com/lampepfl/dotty/issues/1050). + +This restriction rules out the [type-level encoding of a combinator +calculus](https://michid.wordpress.com/2010/01/29/scala-type-level-encoding-of-the-ski-calculus/). + +To rewrite code using type projections on abstract types, consider using +path-dependent types or implicit parameters. diff --git a/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md b/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md new file mode 100644 index 000000000000..07625dcfe885 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md @@ -0,0 +1,54 @@ +--- +layout: doc-page +title: "Dropped: Weak Conformance - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/weak-conformance-spec.html +--- + +To simplify the underlying type theory, Scala 3 drops the notion of +[*weak conformance*](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#weak-conformance) +altogether. Instead, it provides more flexibility when +assigning a type to a constant expression. 
The new rule is: + + - *If* a list of expressions `Es` appears as one of + + - the elements of a vararg parameter, or + - the alternatives of an if-then-else or match expression, or + - the body and catch results of a try expression, + +- *and* all expressions have primitive numeric types, but they do not + all have the same type, + +- *then* the following is attempted: + + - the expressions `Es` are partitioned into `Int` constants on the + one hand, and all other expressions on the other hand, + - if all the other expressions have the same numeric type `T` + (which can be one of `Byte`, `Short`, `Char`, `Int`, `Long`, `Float`, + `Double`), possibly after widening, and if none of the `Int` + literals would incur a loss of precision when converted to `T`, + then they are thus converted (the other expressions are left + unchanged regardless), + - otherwise, the expressions `Es` are used unchanged. + + A loss of precision occurs for + - an `Int -> Float` conversion of a constant + `c` if `c.toFloat.toInt != c` + - an `Int -> Byte` conversion of a constant + `c` if `c.toByte.toInt != c`, + - an `Int -> Short` conversion of a constant + `c` if `c.toShort.toInt != c`. 
+ +## Examples + +```scala +inline val b = 33 +def f(): Int = b + 1 +Array(b, 33, 5.5) : Array[Double] // b is an inline val +Array(f(), 33, 5.5) : Array[AnyVal] // f() is not a constant +Array(5, 11L) : Array[Long] +Array(5, 11L, 5.5) : Array[AnyVal] // Long and Double found +Array(1.0f, 2) : Array[Float] +Array(1.0f, 1234567890): Array[AnyVal] // loss of precision +Array(b, 33, 'a') : Array[Char] +Array(5.toByte, 11) : Array[Byte] +``` diff --git a/docs/_spec/TODOreference/dropped-features/weak-conformance.md b/docs/_spec/TODOreference/dropped-features/weak-conformance.md new file mode 100644 index 000000000000..b1478326b2c9 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/weak-conformance.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: "Dropped: Weak Conformance" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/weak-conformance.html +--- + +In some situations, Scala used a _weak conformance_ relation when +testing type compatibility or computing the least upper bound of a set +of types. The principal motivation behind weak conformance was to +make an expression like this have type `List[Double]`: + +```scala +List(1.0, math.sqrt(3.0), 0, -3.3) // : List[Double] +``` + +It's "obvious" that this should be a `List[Double]`. However, without +some special provision, the least upper bound of the lists's element +types `(Double, Double, Int, Double)` would be `AnyVal`, hence the list +expression would be given type `List[AnyVal]`. + +A less obvious example is the following one, which was also typed as a +`List[Double]`, using the weak conformance relation. + +```scala +val n: Int = 3 +val c: Char = 'X' +val d: Double = math.sqrt(3.0) +List(n, c, d) // used to be: List[Double], now: List[AnyVal] +``` + +Here, it is less clear why the type should be widened to +`List[Double]`, a `List[AnyVal]` seems to be an equally valid -- and +more principled -- choice. 
+ +Weak conformance applies to all "numeric" types (including `Char`), and +independently of whether the expressions are literals or not. However, +in hindsight, the only intended use case is for *integer literals* to +be adapted to the type of the other expressions. Other types of numerics +have an explicit type annotation embedded in their syntax (`f`, `d`, +`.`, `L` or `'` for `Char`s) which ensures that their author really +meant them to have that specific type). + +Therefore, Scala 3 drops the general notion of weak conformance, and +instead keeps one rule: `Int` literals are adapted to other numeric +types if necessary. + +[More details](weak-conformance-spec.md) diff --git a/docs/_spec/TODOreference/dropped-features/wildcard-init.md b/docs/_spec/TODOreference/dropped-features/wildcard-init.md new file mode 100644 index 000000000000..e42854079cf9 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/wildcard-init.md @@ -0,0 +1,23 @@ +--- +layout: doc-page +title: "Dropped: Wildcard Initializer" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/wildcard-init.html +--- + +The syntax + +```scala + var x: A = _ +``` + +that was used to indicate an uninitialized field, has been dropped. +At its place there is a special value `uninitialized` in the `scala.compiletime` package. +To get an uninitialized field, you now write + +```scala +import scala.compiletime.uninitialized + +var x: A = uninitialized +``` + +To enable cross-compilation, `_` is still supported, but it will be dropped in a future 3.x version. 
diff --git a/docs/_spec/TODOreference/dropped-features/xml.md b/docs/_spec/TODOreference/dropped-features/xml.md new file mode 100644 index 000000000000..458a347a66c4 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/xml.md @@ -0,0 +1,39 @@ +--- +layout: doc-page +title: "Dropped: XML Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/xml.html +--- + +XML Literals are still supported, but will be dropped in the near future, to +be replaced with [XML string interpolation](https://github.com/lampepfl/xml-interpolator): + +```scala +import dotty.xml.interpolator.* + +case class Person(name: String) { override def toString = name } + +@main def test: Unit = + val bill = Person("Bill") + val john = Person("John") + val mike = Person("Mike") + val todoList = List( + (bill, john, "Meeting", "Room 203, 11:00am"), + (john, mike, "Holiday", "March 22-24") + ) + // XML literals (to be dropped) + val mails1 = for (from, to, heading, body) <- todoList yield + + {from}{to} + {heading}{body} + + println(mails1) + // XML string interpolation + val mails2 = for (from, to, heading, body) <- todoList yield xml""" + + ${from}${to} + ${heading}${body} + """ + println(mails2) +``` + +For more information, see the semester project [XML String Interpolator for Dotty](https://infoscience.epfl.ch/record/267527) by Yassin Kammoun (2019). diff --git a/docs/_spec/TODOreference/enums/adts.md b/docs/_spec/TODOreference/enums/adts.md new file mode 100644 index 000000000000..3ab8c9f3b45b --- /dev/null +++ b/docs/_spec/TODOreference/enums/adts.md @@ -0,0 +1,173 @@ +--- +layout: doc-page +title: "Algebraic Data Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/adts.html +--- + +The [`enum` concept](./enums.md) is general enough to also support algebraic data +types (ADTs) and their generalized version (GADTs). 
Here is an example +how an `Option` type can be represented as an ADT: + +```scala +enum Option[+T]: + case Some(x: T) + case None +``` + +This example introduces an `Option` enum with a covariant type +parameter `T` consisting of two cases, `Some` and `None`. `Some` is +parameterized with a value parameter `x`. It is a shorthand for writing a +case class that extends `Option`. Since `None` is not parameterized, it +is treated as a normal enum value. + +The `extends` clauses that were omitted in the example above can also +be given explicitly: + +```scala +enum Option[+T]: + case Some(x: T) extends Option[T] + case None extends Option[Nothing] +``` + +Note that the parent type of the `None` value is inferred as +`Option[Nothing]`. Generally, all covariant type parameters of the enum +class are minimized in a compiler-generated `extends` clause whereas all +contravariant type parameters are maximized. If `Option` was non-variant, +you would need to give the extends clause of `None` explicitly. + +As for normal enum values, the cases of an `enum` are all defined in +the `enum`s companion object. So it's `Option.Some` and `Option.None` +unless the definitions are "pulled out" with an import: + +```scala +scala> Option.Some("hello") +val res1: t2.Option[String] = Some(hello) + +scala> Option.None +val res2: t2.Option[Nothing] = None +``` + +Note that the type of the expressions above is always `Option`. Generally, the type of a enum case constructor application will be widened to the underlying enum type, unless a more specific type is expected. This is a subtle difference with respect to normal case classes. The classes making up the cases do exist, and can be unveiled, either by constructing them directly with a `new`, or by explicitly providing an expected type. 
+ +```scala +scala> new Option.Some(2) +val res3: Option.Some[Int] = Some(2) +scala> val x: Option.Some[Int] = Option.Some(3) +val res4: Option.Some[Int] = Some(3) +``` + +As all other enums, ADTs can define methods. For instance, here is `Option` again, with an +`isDefined` method and an `Option(...)` constructor in its companion object. + +```scala +enum Option[+T]: + case Some(x: T) + case None + + def isDefined: Boolean = this match + case None => false + case _ => true + +object Option: + + def apply[T >: Null](x: T): Option[T] = + if x == null then None else Some(x) + +end Option +``` + +Enumerations and ADTs have been presented as two different +concepts. But since they share the same syntactic construct, they can +be seen simply as two ends of a spectrum and it is perfectly possible +to construct hybrids. For instance, the code below gives an +implementation of `Color` either with three enum values or with a +parameterized case that takes an RGB value. + +```scala +enum Color(val rgb: Int): + case Red extends Color(0xFF0000) + case Green extends Color(0x00FF00) + case Blue extends Color(0x0000FF) + case Mix(mix: Int) extends Color(mix) +``` + +## Parameter Variance of Enums + +By default, parameterized cases of enums with type parameters will copy the type parameters of their parent, along +with any variance notations. 
As usual, it is important to use type parameters carefully when they are variant, as shown +below: + +The following `View` enum has a contravariant type parameter `T` and a single case `Refl`, representing a function +mapping a type `T` to itself: + +```scala +enum View[-T]: + case Refl(f: T => T) +``` + +The definition of `Refl` is incorrect, as it uses contravariant type `T` in the covariant result position of a +function type, leading to the following error: + +```scala +-- Error: View.scala:2:12 -------- +2 | case Refl(f: T => T) + | ^^^^^^^^^ + |contravariant type T occurs in covariant position in type T => T of value f + |enum case Refl requires explicit declaration of type T to resolve this issue. +``` + +Because `Refl` does not declare explicit parameters, it looks to the compiler like the following: + +```scala +enum View[-T]: + case Refl[/*synthetic*/-T1](f: T1 => T1) extends View[T1] +``` + +The compiler has inferred for `Refl` the contravariant type parameter `T1`, following `T` in `View`. +We can now clearly see that `Refl` needs to declare its own non-variant type parameter to correctly type `f`, +and can remedy the error by the following change to `Refl`: + +```diff +enum View[-T]: +- case Refl(f: T => T) ++ case Refl[R](f: R => R) extends View[R] +``` + +Above, type `R` is chosen as the parameter for `Refl` to highlight that it has a different meaning to +type `T` in `View`, but any name will do. + +After some further changes, a more complete implementation of `View` can be given as follows and be used +as the function type `T => U`: + +```scala +enum View[-T, +U] extends (T => U): + case Refl[R](f: R => R) extends View[R, R] + + final def apply(t: T): U = this match + case refl: Refl[r] => refl.f(t) +``` + +## Syntax of Enums + +Changes to the syntax fall in two categories: enum definitions and cases inside enums. +The changes are specified below as deltas with respect to the Scala syntax given [here](../syntax.md) + + 1. 
Enum definitions are defined as follows: + + ``` + TmplDef ::= `enum' EnumDef + EnumDef ::= id ClassConstr [`extends' [ConstrApps]] EnumBody + EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ + EnumStat ::= TemplateStat + | {Annotation [nl]} {Modifier} EnumCase + ``` + + 2. Cases of enums are defined as follows: + + ``` + EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps]] | ids) + ``` + +## Reference + +For more information, see [Issue #1970](https://github.com/lampepfl/dotty/issues/1970). diff --git a/docs/_spec/TODOreference/enums/desugarEnums.md b/docs/_spec/TODOreference/enums/desugarEnums.md new file mode 100644 index 000000000000..477653d670bb --- /dev/null +++ b/docs/_spec/TODOreference/enums/desugarEnums.md @@ -0,0 +1,215 @@ +--- +layout: doc-page +title: "Translation of Enums and ADTs" +nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/desugarEnums.html +--- + +The compiler expands enums and their cases to code that only uses +Scala's other language features. As such, enums in Scala are +convenient _syntactic sugar_, but they are not essential to understand +Scala's core. + +We now explain the expansion of enums in detail. First, +some terminology and notational conventions: + + - We use `E` as a name of an enum, and `C` as a name of a case that appears in `E`. + - We use `<...>` for syntactic constructs that in some circumstances might be empty. For instance, + `` represents one or more parameter lists `(...)` or nothing at all. + + - Enum cases fall into three categories: + + - _Class cases_ are those cases that are parameterized, either with a type parameter section `[...]` or with one or more (possibly empty) parameter sections `(...)`. + - _Simple cases_ are cases of a non-generic enum that have neither parameters nor an extends clause or body. That is, they consist of a name only. 
+ - _Value cases_ are all cases that do not have a parameter section but that do have a (possibly generated) `extends` clause and/or a body. + + Simple cases and value cases are collectively called _singleton cases_. + +The desugaring rules imply that class cases are mapped to case classes, and singleton cases are mapped to `val` definitions. + +There are nine desugaring rules. Rule (1) desugars enum definitions. Rules +(2) and (3) desugar simple cases. Rules (4) to (6) define `extends` clauses for cases that +are missing them. Rules (7) to (9) define how such cases with `extends` clauses +map into `case class`es or `val`s. + +1. An `enum` definition + ```scala + enum E ... { } + ``` + expands to a `sealed abstract` class that extends the `scala.reflect.Enum` trait and + an associated companion object that contains the defined cases, expanded according + to rules (2 - 8). The enum class starts with a compiler-generated import that imports + the names `` of all cases so that they can be used without prefix in the class. + ```scala + sealed abstract class E ... extends with scala.reflect.Enum { + import E.{ } + + } + object E { } + ``` + +2. A simple case consisting of a comma-separated list of enum names + ```scala + case C_1, ..., C_n + ``` + expands to + ```scala + case C_1; ...; case C_n + ``` + Any modifiers or annotations on the original case extend to all expanded + cases. + +3. A simple case + ```scala + case C + ``` + of an enum `E` that does not take type parameters expands to + ```scala + val C = $new(n, "C") + ``` + Here, `$new` is a private method that creates an instance of `E` (see + below). + +4. If `E` is an enum with type parameters + ```scala + V1 T1 >: L1 <: U1 , ... , Vn Tn >: Ln <: Un (n > 0) + ``` + where each of the variances `Vi` is either `'+'` or `'-'`, then a simple case + ```scala + case C + ``` + expands to + ```scala + case C extends E[B1, ..., Bn] + ``` + where `Bi` is `Li` if `Vi = '+'` and `Ui` if `Vi = '-'`. 
This result is then further + rewritten with rule (8). Simple cases of enums with non-variant type + parameters are not permitted (however value cases with explicit `extends` clause are) + +5. A class case without an extends clause + ```scala + case C + ``` + of an enum `E` that does not take type parameters expands to + ```scala + case C extends E + ``` + This result is then further rewritten with rule (9). + +6. If `E` is an enum with type parameters `Ts`, a class case with neither type parameters nor an extends clause + ```scala + case C + ``` + expands to + ```scala + case C[Ts] extends E[Ts] + ``` + This result is then further rewritten with rule (9). For class cases that have type parameters themselves, an extends clause needs to be given explicitly. + +7. If `E` is an enum with type parameters `Ts`, a class case without type parameters but with an extends clause + ```scala + case C extends + ``` + expands to + ```scala + case C[Ts] extends + ``` + provided at least one of the parameters `Ts` is mentioned in a parameter type in + `` or in a type argument in ``. + +8. A value case + ```scala + case C extends + ``` + expands to a value definition in `E`'s companion object: + ```scala + val C = new { ; def ordinal = n } + ``` + where `n` is the ordinal number of the case in the companion object, + starting from 0. The anonymous class also + implements the abstract `Product` methods that it inherits from `Enum`. + + It is an error if a value case refers to a type parameter of the enclosing `enum` + in a type argument of ``. + +9. A class case + ```scala + case C extends + ``` + expands analogous to a final case class in `E`'s companion object: + ```scala + final case class C extends + ``` + The enum case defines an `ordinal` method of the form + ```scala + def ordinal = n + ``` + where `n` is the ordinal number of the case in the companion object, + starting from 0. 
+ + It is an error if a value case refers to a type parameter of the enclosing `enum` + in a parameter type in `` or in a type argument of ``, unless that parameter is already + a type parameter of the case, i.e. the parameter name is defined in ``. + + The compiler-generated `apply` and `copy` methods of an enum case + ```scala + case C(ps) extends P1, ..., Pn + ``` + are treated specially. A call `C(ts)` of the apply method is ascribed the underlying type + `P1 & ... & Pn` (dropping any [transparent traits](../other-new-features/transparent-traits.md)) + as long as that type is still compatible with the expected type at the point of application. + A call `t.copy(ts)` of `C`'s `copy` method is treated in the same way. + +## Translation of Enums with Singleton Cases + +An enum `E` (possibly generic) that defines one or more singleton cases +will define the following additional synthetic members in its companion object (where `E'` denotes `E` with +any type parameters replaced by wildcards): + + - A method `valueOf(name: String): E'`. It returns the singleton case value whose identifier is `name`. + - A method `values` which returns an `Array[E']` of all singleton case + values defined by `E`, in the order of their definitions. + +If `E` contains at least one simple case, its companion object will define in addition: + + - A private method `$new` which defines a new simple case value with given + ordinal number and name. This method can be thought as being defined as + follows. + + ```scala + private def $new(_$ordinal: Int, $name: String) = + new E with runtime.EnumValue: + def ordinal = _$ordinal + override def productPrefix = $name // if not overridden in `E` + override def toString = $name // if not overridden in `E` + ``` + +The anonymous class also implements the abstract `Product` methods that it inherits from `Enum`. 
+The `ordinal` method is only generated if the enum does not extend from `java.lang.Enum` (as Scala enums do not extend +`java.lang.Enum`s unless explicitly specified). In case it does, there is no need to generate `ordinal` as +`java.lang.Enum` defines it. Similarly there is no need to override `toString` as that is defined in terms of `name` in +`java.lang.Enum`. Finally, `productPrefix` will call `this.name` when `E` extends `java.lang.Enum`. + +## Scopes for Enum Cases + +A case in an `enum` is treated similarly to a secondary constructor. It can access neither the enclosing `enum` using `this`, nor its value parameters or instance members using simple +identifiers. + +Even though translated enum cases are located in the enum's companion object, referencing +this object or its members via `this` or a simple identifier is also illegal. The compiler typechecks enum cases in the scope of the enclosing companion object but flags any such illegal accesses as errors. + +## Translation of Java-compatible enums + +A Java-compatible enum is an enum that extends `java.lang.Enum`. The translation rules are the same as above, with the reservations defined in this section. + +It is a compile-time error for a Java-compatible enum to have class cases. + +Cases such as `case C` expand to a `@static val` as opposed to a `val`. This allows them to be generated as static fields of the enum type, thus ensuring they are represented the same way as Java enums. + +## Other Rules + +- A normal case class which is not produced from an enum case is not allowed to extend + `scala.reflect.Enum`. This ensures that the only cases of an enum are the ones that are + explicitly declared in it. + +- If an enum case has an `extends` clause, the enum class must be one of the + classes that's extended. 
diff --git a/docs/_spec/TODOreference/enums/enums-index.md b/docs/_spec/TODOreference/enums/enums-index.md new file mode 100644 index 000000000000..80d703c3e897 --- /dev/null +++ b/docs/_spec/TODOreference/enums/enums-index.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Enums" +nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/index.html +--- + +This chapter documents enums in Scala 3. diff --git a/docs/_spec/TODOreference/enums/enums.md b/docs/_spec/TODOreference/enums/enums.md new file mode 100644 index 000000000000..65051bdfb39f --- /dev/null +++ b/docs/_spec/TODOreference/enums/enums.md @@ -0,0 +1,222 @@ +--- +layout: doc-page +title: "Enumerations" +nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/enums.html +--- + +An enumeration is used to define a type consisting of a set of named values. + +```scala +enum Color: + case Red, Green, Blue +``` + +This defines a new `sealed` class, `Color`, with three values, `Color.Red`, +`Color.Green`, `Color.Blue`. The color values are members of `Color`s +companion object. + +## Parameterized enums + +Enums can be parameterized. + +```scala +enum Color(val rgb: Int): + case Red extends Color(0xFF0000) + case Green extends Color(0x00FF00) + case Blue extends Color(0x0000FF) +``` + +As the example shows, you can define the parameter value by using an +explicit extends clause. + +## Methods defined for enums + +The values of an enum correspond to unique integers. The integer +associated with an enum value is returned by its `ordinal` method: + +```scala +scala> val red = Color.Red +val red: Color = Red +scala> red.ordinal +val res0: Int = 0 +``` + +The companion object of an enum also defines three utility methods. +The `valueOf` method obtains an enum value +by its name. The `values` method returns all enum values +defined in an enumeration in an `Array`. The `fromOrdinal` +method obtains an enum value from its ordinal (`Int`) value. 
+ +```scala +scala> Color.valueOf("Blue") +val res0: Color = Blue +scala> Color.values +val res1: Array[Color] = Array(Red, Green, Blue) +scala> Color.fromOrdinal(0) +val res2: Color = Red +``` + +## User-defined members of enums + +It is possible to add your own definitions to an enum. Example: + +```scala +enum Planet(mass: Double, radius: Double): + private final val G = 6.67300E-11 + def surfaceGravity = G * mass / (radius * radius) + def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity + + case Mercury extends Planet(3.303e+23, 2.4397e6) + case Venus extends Planet(4.869e+24, 6.0518e6) + case Earth extends Planet(5.976e+24, 6.37814e6) + case Mars extends Planet(6.421e+23, 3.3972e6) + case Jupiter extends Planet(1.9e+27, 7.1492e7) + case Saturn extends Planet(5.688e+26, 6.0268e7) + case Uranus extends Planet(8.686e+25, 2.5559e7) + case Neptune extends Planet(1.024e+26, 2.4746e7) +end Planet +``` + +## User-defined companion object of enums +It is also possible to define an explicit companion object for an enum: + +```scala +object Planet: + def main(args: Array[String]) = + val earthWeight = args(0).toDouble + val mass = earthWeight / Earth.surfaceGravity + for p <- values do + println(s"Your weight on $p is ${p.surfaceWeight(mass)}") +end Planet +``` + +## Restrictions on Enum Cases + +Enum case declarations are similar to secondary constructors: +they are scoped outside of the enum template, despite being declared within it. +This means that enum case declarations cannot access inner members of the +enum class. + +Similarly, enum case declarations may not directly reference members of the enum's companion object, +even if they are imported (directly, or by renaming). 
For example: + +```scala +import Planet.* +enum Planet(mass: Double, radius: Double): + private final val (mercuryMass, mercuryRadius) = (3.303e+23, 2.4397e6) + + case Mercury extends Planet(mercuryMass, mercuryRadius) // Not found + case Venus extends Planet(venusMass, venusRadius) // illegal reference + case Earth extends Planet(Planet.earthMass, Planet.earthRadius) // ok +object Planet: + private final val (venusMass, venusRadius) = (4.869e+24, 6.0518e6) + private final val (earthMass, earthRadius) = (5.976e+24, 6.37814e6) +end Planet +``` +The fields referenced by `Mercury` are not visible, and the fields referenced by `Venus` may not +be referenced directly (using `import Planet.*`). You must use an indirect reference, +such as demonstrated with `Earth`. + +## Deprecation of Enum Cases + +As a library author, you may want to signal that an enum case is no longer intended for use. However you could still want to gracefully handle the removal of a case from your public API, such as special casing deprecated cases. + +To illustrate, say that the `Planet` enum originally had an additional case: + +```diff + enum Planet(mass: Double, radius: Double): + ... + case Neptune extends Planet(1.024e+26, 2.4746e7) ++ case Pluto extends Planet(1.309e+22, 1.1883e3) + end Planet +``` + +We now want to deprecate the `Pluto` case. First we add the `scala.deprecated` annotation to `Pluto`: + +```diff + enum Planet(mass: Double, radius: Double): + ... 
+ case Neptune extends Planet(1.024e+26, 2.4746e7) +- case Pluto extends Planet(1.309e+22, 1.1883e3) ++ ++ @deprecated("refer to IAU definition of planet") ++ case Pluto extends Planet(1.309e+22, 1.1883e3) + end Planet +``` + +Outside the lexical scopes of `enum Planet` or `object Planet`, references to `Planet.Pluto` will produce a deprecation warning, but within those scopes we can still reference it to implement introspection over the deprecated cases: + +```scala +trait Deprecations[T <: reflect.Enum] { + extension (t: T) def isDeprecatedCase: Boolean +} + +object Planet { + given Deprecations[Planet] with { + extension (p: Planet) + def isDeprecatedCase = p == Pluto + } +} +``` + +We could imagine that a library may use [type class derivation](../contextual/derivation.md) to automatically provide an instance for `Deprecations`. + +## Compatibility with Java Enums + +If you want to use the Scala-defined enums as [Java enums](https://docs.oracle.com/javase/tutorial/java/javaOO/enum.html), you can do so by extending +the class `java.lang.Enum`, which is imported by default, as follows: + +```scala +enum Color extends Enum[Color] { case Red, Green, Blue } +``` + +The type parameter comes from the Java enum [definition](https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Enum.html) and should be the same as the type of the enum. +There is no need to provide constructor arguments (as defined in the Java API docs) to `java.lang.Enum` when extending it – the compiler will generate them automatically. + +After defining `Color` like that, you can use it like you would a Java enum: + +```scala +scala> Color.Red.compareTo(Color.Green) +val res15: Int = -1 +``` + +For a more in-depth example of using Scala 3 enums from Java, see [this test](https://github.com/lampepfl/dotty/tree/main/tests/run/enum-java). In the test, the enums are defined in the `MainScala.scala` file and used from a Java source, `Test.java`. 
+ +## Implementation + +Enums are represented as `sealed` classes that extend the `scala.reflect.Enum` trait. +This trait defines a single public method, `ordinal`: + +```scala +package scala.reflect + +/** A base trait of all Scala enum definitions */ +transparent trait Enum extends Any, Product, Serializable: + + /** A number uniquely identifying a case of an enum */ + def ordinal: Int +``` + +Enum values with `extends` clauses get expanded to anonymous class instances. +For instance, the `Venus` value above would be defined like this: + +```scala +val Venus: Planet = new Planet(4.869E24, 6051800.0): + def ordinal: Int = 1 + override def productPrefix: String = "Venus" + override def toString: String = "Venus" +``` + +Enum values without `extends` clauses all share a single implementation +that can be instantiated using a private method that takes a tag and a name as arguments. +For instance, the first +definition of value `Color.Red` above would expand to: + +```scala +val Red: Color = $new(0, "Red") +``` + +## Reference + +For more information, see [Issue #1970](https://github.com/lampepfl/dotty/issues/1970) and +[PR #4003](https://github.com/lampepfl/dotty/pull/4003). diff --git a/docs/_spec/TODOreference/experimental/canthrow.md b/docs/_spec/TODOreference/experimental/canthrow.md new file mode 100644 index 000000000000..025a0ed1c686 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/canthrow.md @@ -0,0 +1,281 @@ +--- +layout: doc-page +title: "CanThrow Capabilities" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/canthrow.html +--- + +This page describes experimental support for exception checking in Scala 3. It is enabled by the language import +```scala +import language.experimental.saferExceptions +``` +The reason for publishing this extension now is to get feedback on its usability. We are working on more advanced type systems that build on the general ideas put forward in the extension. 
Those type systems have application areas beyond checked exceptions. Exception checking is a useful starting point since exceptions are familiar to all Scala programmers and their current treatment leaves room for improvement. + +## Why Exceptions? + +Exceptions are an ideal mechanism for error handling in many situations. They serve the intended purpose of propagating error conditions with a minimum of boilerplate. They cause zero overhead for the "happy path", which means they are very efficient as long as errors arise infrequently. Exceptions are also debug friendly, since they produce stack traces that can be inspected at the handler site. So one never has to guess where an erroneous condition originated. + +## Why Not Exceptions? + +However, exceptions in current Scala and many other languages are not reflected in the type system. This means that an essential part of the contract of a function - i.e. what exceptions can it produce? - is not statically checked. Most people acknowledge that this is a problem, but that so far the alternative of checked exceptions was just too painful to be considered. A good example are Java checked exceptions, which do the right thing in principle, but are widely regarded as a mistake since they are so difficult to deal with. So far, none of the successor languages that are modeled after Java or that build on the JVM has copied this feature. See for example Anders Hejlsberg's [statement on why C# does not have checked exceptions](https://www.artima.com/articles/the-trouble-with-checked-exceptions). + +## The Problem With Java's Checked Exceptions + +The main problem with [Java's checked exception model](https://docs.oracle.com/javase/specs/jls/se8/html/jls-11.html#jls-11.2) is its inflexibility, which is due to lack of polymorphism. 
Consider for instance the `map` function which is declared on `List[A]` like this: +```scala + def map[B](f: A => B): List[B] +``` +In the Java model, function `f` is not allowed to throw a checked exception. So the following call would be invalid: +```scala + xs.map(x => if x < limit then x * x else throw LimitExceeded()) +``` +The only way around this would be to wrap the checked exception `LimitExceeded` in an unchecked [`java.lang.RuntimeException`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/RuntimeException.html) that is caught at the callsite and unwrapped again. Something like this: +```scala + try + xs.map(x => if x < limit then x * x else throw Wrapper(LimitExceeded())) + catch case Wrapper(ex) => throw ex +``` +Ugh! No wonder checked exceptions in Java are not very popular. + +## Monadic Effects + +So the dilemma is that exceptions are easy to use only as long as we forget static type checking. This has caused many people working with Scala to abandon exceptions altogether and to use an error monad like [`Either`](https://scala-lang.org/api/3.x/scala/util/Either.html) instead. This can work in many situations but is not without its downsides either. It makes code a lot more complicated and harder to refactor. It means one is quickly confronted with the problem how to work with several monads. In general, dealing with one monad at a time in Scala is straightforward but dealing with several monads together is much less pleasant since monads don't compose. A great number of techniques have been proposed, implemented, and promoted to deal with this, from monad transformers, to free monads, to tagless final. But none of these techniques is universally liked; each introduces a complicated DSL that's hard to understand for non-experts, introduces runtime overheads, and makes debugging difficult. 
In the end, quite a few developers prefer to work instead with a single "super-monad" like [`ZIO`](https://zio.dev/version-1.x/datatypes/core/zio) that has error propagation built in alongside other aspects. This one-size fits all approach can work very nicely, even though (or is it because?) it represents an all-encompassing framework. + +However, a programming language is not a framework; it has to cater also for those applications that do not fit the framework's use cases. So there's still a strong motivation for getting exception checking right. + +## From Effects To Capabilities + +Why does `map` work so poorly with Java's checked exception model? It's because +`map`'s signature limits function arguments to not throw checked exceptions. We could try to come up with a more polymorphic formulation of `map`. For instance, it could look like this: +```scala + def map[B, E](f: A => B throws E): List[B] throws E +``` +This assumes a type `A throws E` to indicate computations of type `A` that can throw an exception of type `E`. But in practice the overhead of the additional type parameters makes this approach unappealing as well. Note in particular that we'd have to parameterize _every method_ that takes a function argument that way, so the added overhead of declaring all these exception types looks just like a sort of ceremony we would like to avoid. + +But there is a way to avoid the ceremony. Instead of concentrating on possible _effects_ such as "this code might throw an exception", concentrate on _capabilities_ such as "this code needs the capability to throw an exception". From a standpoint of expressiveness this is quite similar. But capabilities can be expressed as parameters whereas traditionally effects are expressed as some addition to result values. It turns out that this can make a big difference! + +## The `CanThrow` Capability + +In the _effects as capabilities_ model, an effect is expressed as an (implicit) parameter of a certain type. 
For exceptions we would expect parameters of type +[`CanThrow[E]`](https://scala-lang.org/api/3.x/scala/CanThrow.html) where `E` stands for the exception that can be thrown. Here is the definition of `CanThrow`: +```scala +erased class CanThrow[-E <: Exception] +``` +This shows another experimental Scala feature: [erased definitions](./erased-defs.md). Roughly speaking, values of an erased class do not generate runtime code; they are erased before code generation. This means that all `CanThrow` capabilities are compile-time only artifacts; they do not have a runtime footprint. + +Now, if the compiler sees a `throw Exc()` construct where `Exc` is a checked exception, it will check that there is a capability of type `CanThrow[Exc]` that can be summoned as a given. It's a compile-time error if that's not the case. + +How can the capability be produced? There are several possibilities: + +Most often, the capability is produced by having a using clause `(using CanThrow[Exc])` in some enclosing scope. This roughly corresponds to a [`throws`](https://docs.oracle.com/javase/specs/jls/se7/html/jls-8.html#jls-8.4.6) clause in Java. The analogy is even stronger since alongside [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) there is also the following type alias defined in the [`scala`](https://scala-lang.org/api/3.x/scala.html) package: +```scala +infix type $throws[R, +E <: Exception] = CanThrow[E] ?=> R +``` +That is, [`R $throws E`](https://scala-lang.org/api/3.x/scala/runtime.html#$throws-0) is a context function type that takes an implicit `CanThrow[E]` parameter and that returns a value of type `R`. What's more, the compiler will translate infix types with `throws` as the operator to `$throws` applications according to the rules +``` + A throws E --> A $throws E + A throws E₁ | ... | Eᵢ --> A $throws E₁ ...
$throws Eᵢ +``` +Therefore, a method written like this: +```scala +def m(x: T)(using CanThrow[E]): U +``` +can alternatively be expressed like this: +```scala +def m(x: T): U throws E +``` +Also the capability to throw multiple types of exceptions can be expressed in a few ways as shown in the examples below: +```scala +def m(x: T): U throws E1 | E2 +def m(x: T): U throws E1 throws E2 +def m(x: T)(using CanThrow[E1], CanThrow[E2]): U +def m(x: T)(using CanThrow[E1])(using CanThrow[E2]): U +def m(x: T)(using CanThrow[E1]): U throws E2 +``` + +**Note 1:** A signature like +```scala +def m(x: T)(using CanThrow[E1 | E2]): U +``` +would also allow throwing `E1` or `E2` inside the method's body but might cause problems when someone tried to call this method +from another method declaring its `CanThrow` capabilities like in the earlier examples. +This is because `CanThrow` has a contravariant type parameter so `CanThrow[E1 | E2]` is a subtype of both `CanThrow[E1]` and `CanThrow[E2]`. +Hence the presence of a given instance of `CanThrow[E1 | E2]` in scope satisfies the requirement for `CanThrow[E1]` and `CanThrow[E2]` +but given instances of `CanThrow[E1]` and `CanThrow[E2]` cannot be combined to provide an instance of `CanThrow[E1 | E2]`. + +**Note 2:** One should keep in mind that `|` binds its left and right arguments more tightly than `throws` so `A | B throws E1 | E2` means `(A | B) throws (E1 | E2)`, not `A | (B throws E1) | E2`. + +The `CanThrow`/`throws` combo essentially propagates the `CanThrow` requirement outwards. But where are these capabilities created in the first place? That's in the `try` expression. Given a `try` like this: + +```scala +try + body +catch + case ex1: Ex1 => handler1 + ... + case exN: ExN => handlerN +``` +the compiler generates an accumulated capability of type `CanThrow[Ex1 | ... | ExN]` that is available as a given in the scope of `body`.
It does this by augmenting the `try` roughly as follows: +```scala +try + erased given CanThrow[Ex1 | ... | ExN] = compiletime.erasedValue + body +catch ... +``` +Note that the right-hand side of the synthesized given is `???` (undefined). This is OK since +this given is erased; it will not be executed at runtime. + +**Note 1:** The [`saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) feature is designed to work only with checked exceptions. An exception type is _checked_ if it is a subtype of +`Exception` but not of `RuntimeException`. The signature of `CanThrow` still admits `RuntimeException`s since `RuntimeException` is a proper subtype of its bound, `Exception`. But no capabilities will be generated for `RuntimeException`s. Furthermore, `throws` clauses +also may not refer to `RuntimeException`s. + +**Note 2:** To keep things simple, the compiler will currently only generate capabilities +for catch clauses of the form +```scala + case ex: Ex => +``` +where `ex` is an arbitrary variable name (`_` is also allowed), and `Ex` is an arbitrary +checked exception type. Constructor patterns such as `Ex(...)` or patterns with guards +are not allowed. The compiler will issue an error if one of these is used to catch +a checked exception and `saferExceptions` is enabled. + +## Example + +That's it. Let's see it in action in an example. First, add an import +```scala +import language.experimental.saferExceptions +``` +to enable exception checking. Now, define an exception `LimitExceeded` and +a function `f` like this: +```scala +val limit = 10e9 +class LimitExceeded extends Exception +def f(x: Double): Double = + if x < limit then x * x else throw LimitExceeded() +``` +You'll get this error message: +``` + if x < limit then x * x else throw LimitExceeded() + ^^^^^^^^^^^^^^^^^^^^^ +The capability to throw exception LimitExceeded is missing. 
+``` +The capability can be provided by one of the following: + + - Adding a using clause `(using CanThrow[LimitExceeded])` to the definition of the enclosing method + - Adding `throws LimitExceeded` clause after the result type of the enclosing method + - Wrapping this piece of code with a `try` block that catches `LimitExceeded` + +The following import might fix the problem: +```scala + import unsafeExceptions.canThrowAny +``` +As the error message implies, you have to declare that `f` needs the capability to throw a `LimitExceeded` exception. The most concise way to do so is to add a `throws` clause: +```scala +def f(x: Double): Double throws LimitExceeded = + if x < limit then x * x else throw LimitExceeded() +``` +Now put a call to `f` in a `try` that catches `LimitExceeded`: +```scala +@main def test(xs: Double*) = + try println(xs.map(f).sum) + catch case ex: LimitExceeded => println("too large") +``` +Run the program with some inputs: +``` +> scala test 1 2 3 +14.0 +> scala test +0.0 +> scala test 1 2 3 100000000000 +too large +``` +Everything typechecks and works as expected. But wait - we have called `map` without any ceremony! How did that work? Here's how the compiler expands the `test` function: +```scala +// compiler-generated code +@main def test(xs: Double*) = + try + erased given ctl: CanThrow[LimitExceeded] = compiletime.erasedValue + println(xs.map(x => f(x)(using ctl)).sum) + catch case ex: LimitExceeded => println("too large") +``` +The `CanThrow[LimitExceeded]` capability is passed in a synthesized `using` clause to `f`, since `f` requires it. Then the resulting closure is passed to `map`. The signature of `map` does not have to account for effects. It takes a closure as always, but that +closure may refer to capabilities in its free variables. This means that `map` is +already effect polymorphic even though we did not change its signature at all. 
+So the takeaway is that the effects as capabilities model naturally provides for effect polymorphism whereas this is something that other approaches struggle with. + +## Gradual Typing Via Imports + +Another advantage is that the model allows a gradual migration from current unchecked exceptions to safer exceptions. Imagine for a moment that [`experimental.saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) is turned on everywhere. There would be lots of code that breaks since functions have not yet been properly annotated with `throws`. But it's easy to create an escape hatch that lets us ignore the breakages for a while: simply add the import +```scala +import scala.unsafeExceptions.canThrowAny +``` +This will provide the [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) capability for any exception, and thereby allow +all throws and all other calls, no matter what the current state of `throws` declarations is. Here's the +definition of [`canThrowAny`](https://scala-lang.org/api/3.x/scala/unsafeExceptions$.html#canThrowAny-0): +```scala +package scala +object unsafeExceptions: + given canThrowAny: CanThrow[Exception] = ??? +``` +Of course, defining a global capability like this amounts to cheating. But the cheating is useful for gradual typing. The import could be used to migrate existing code, or to +enable more fluid explorations of code without regard for complete exception safety. At the end of these migrations or explorations the import should be removed. + +## Scope Of the Extension + +To summarize, the extension for safer exception checking consists of the following elements: + + - It adds to the standard library the class `scala.CanThrow`, the type `scala.$throws`, and the [`scala.unsafeExceptions`](https://scala-lang.org/api/3.x/scala/unsafeExceptions$.html) object, as they were described above. 
+ - It adds some desugaring rules to rewrite `throws` types to cascaded `$throws` types. + - It augments the type checking of `throw` by _demanding_ a `CanThrow` capability for the thrown exception. + - It augments the type checking of `try` by _providing_ `CanThrow` capabilities for every caught exception. + +That's all. It's quite remarkable that one can do exception checking in this way without any special additions to the type system. We just need regular givens and context functions. Any runtime overhead is eliminated using `erased`. + +## Caveats + +Our capability model allows to declare and check the thrown exceptions of first-order code. But as it stands, it does not give us enough mechanism to enforce the _absence_ of +capabilities for arguments to higher-order functions. Consider a variant `pureMap` +of `map` that should enforce that its argument does not throw exceptions or have any other effects (maybe because it wants to reorder computations transparently). Right now +we cannot enforce that since the function argument to `pureMap` can capture arbitrary +capabilities in its free variables without them showing up in its type. One possible way to +address this would be to introduce a pure function type (maybe written `A -> B`). Pure functions are not allowed to close over capabilities. Then `pureMap` could be written +like this: +```scala + def pureMap(f: A -> B): List[B] +``` +Another area where the lack of purity requirements shows up is when capabilities escape from bounded scopes. Consider the following function +```scala +def escaped(xs: Double*): () => Int = + try () => xs.map(f).sum + catch case ex: LimitExceeded => -1 +``` +With the system presented here, this function typechecks, with expansion +```scala +// compiler-generated code +def escaped(xs: Double*): () => Int = + try + given ctl: CanThrow[LimitExceeded] = ??? 
+ () => xs.map(x => f(x)(using ctl)).sum + catch case ex: LimitExceeded => -1 +``` +But if you try to call `escaped` like this +```scala +val g = escaped(1, 2, 1000000000) +g() +``` +the result will be a `LimitExceeded` exception thrown at the second line where `g` is called. What's missing is that `try` should enforce that the capabilities it generates do not escape as free variables in the result of its body. It makes sense to describe such scoped effects as _ephemeral capabilities_ - they have lifetimes that cannot be extended to delayed code in a lambda. + + +## Outlook + +We are working on a new class of type system that supports ephemeral capabilities by tracking the free variables of values. Once that research matures, it will hopefully be possible to augment the Scala language so that we can enforce the missing properties. + +And it would have many other applications besides: Exceptions are a special case of _algebraic effects_, which has been a very active research area over the last 20 years and is finding its way into programming languages (e.g. [Koka](https://koka-lang.github.io/koka/doc/book.html#why-handlers), [Eff](https://www.eff-lang.org/learn/), [Multicore OCaml](https://discuss.ocaml.org/t/multicore-ocaml-september-2021-effect-handlers-will-be-in-ocaml-5-0/8554), [Unison](https://www.unisonweb.org/docs/language-reference/#abilities-and-ability-handlers)). In fact, algebraic effects have been characterized as being equivalent to exceptions with an additional _resume_ operation. The techniques developed here for exceptions can probably be generalized to other classes of algebraic effects. + +But even without these additional mechanisms, exception checking is already useful as it is. It gives a clear path forward to make code that uses exceptions safer, better documented, and easier to refactor. The only loophole arises for scoped capabilities - here we have to verify manually that these capabilities do not escape. 
Specifically, a `try` always has to be placed in the same computation stage as the throws that it enables. + +Put another way: If the status quo is 0% static checking since 100% is too painful, then an alternative that gives you 95% static checking with great ergonomics looks like a win. And we might still get to 100% in the future. + +For more info, see also our [paper at the ACM Scala Symposium 2021](https://infoscience.epfl.ch/record/290885). diff --git a/docs/_spec/TODOreference/experimental/cc.md b/docs/_spec/TODOreference/experimental/cc.md new file mode 100644 index 000000000000..878bc0a64ed6 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/cc.md @@ -0,0 +1,738 @@ +--- +layout: doc-page +title: "Capture Checking" +--- + +Capture checking is a research project that modifies the Scala type system to track references to capabilities in values. It can be enabled with a `-Ycc` compiler option. +At present, capture checking is still highly experimental and unstable. + +To get an idea what capture checking can do, let's start with a small example: +```scala +def usingLogFile[T](op: FileOutputStream => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result +``` +The `usingLogFile` method invokes a given operation with a fresh log file as parameter. Once the operation has ended, the log file is closed and the +operation's result is returned. This is a typical _try-with-resources_ pattern, similar to many other such patterns which are often supported by special language constructs in other languages. + +The problem is that `usingLogFile`'s implementation is not entirely safe. One can +undermine it by passing an operation that performs the logging at some later point +after it has terminated. 
For instance: +```scala +val later = usingLogFile { file => () => file.write(0) } +later() // crash +``` +When `later` is executed it tries to write to a file that is already closed, which +results in an uncaught `IOException`. + +Capture checking gives us the mechanism to prevent such errors _statically_. To +prevent unsafe usages of `usingLogFile`, we can declare it like this: +```scala +def usingLogFile[T](op: ({*} FileOutputStream) => T): T = + // same body as before +``` +The only thing that's changed is that the `FileOutputStream` parameter of `op` is now +tagged with `{*}`. We'll see that this turns the parameter into a _capability_ whose lifetime is tracked. + +If we now try to define the problematic value `later`, we get a static error: +``` + | val later = usingLogFile { f => () => f.write(0) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +``` +In this case, it was easy to see that the `logFile` capability escapes in the closure passed to `usingLogFile`. But capture checking also works for more complex cases. +For instance, capture checking is able to distinguish between the following safe code: +```scala +val xs = usingLogFile { f => + List(1, 2, 3).map { x => f.write(x); x * x } +} +``` +and the following unsafe one: +```scala +val xs = usingLogFile { f => + LazyList(1, 2, 3).map { x => f.write(x); x * x } +} +``` +An error would be issued in the second case, but not the first one (this assumes a capture-aware +formulation of `LazyList` which we will present later in this page). + +It turns out that capture checking has very broad applications. Besides the various +try-with-resources patterns, it can also be a key part to the solutions of many other long standing problems in programming languages. Among them: + + - How to have a simple and flexible system for checked exceptions. 
We show later + how capture checking enables a clean and fully safe system for checked exceptions in Scala. + - How to address the problem of effect polymorphism in general. + - How to solve the "what color is your function?" problem of mixing synchronous + and asynchronous computations. + - How to do region-based allocation, safely, + - How to reason about capabilities associated with memory locations. + +The following sections explain in detail how capture checking works in Scala 3. + + +## Overview + +The capture checker extension introduces a new kind of types and it enforces some rules for working with these types. + +Capture checking is enabled by the compiler option `-Ycc`. If the option is not given, the new +type forms can still be written but they are not checked for consistency, because they are +treated simply as certain uninterpreted annotated types. + +## Capabilities and Capturing Types + +Capture checking is done in terms of _capturing types_ of the form +`{c₁, ..., cᵢ} T`. Here `T` is a type, and `{c₁, ..., cᵢ}` is a _capture set_ consisting of references to capabilities `c₁, ..., cᵢ`. + +A _capability_ is syntactically a method- or class-parameter, a local variable, or the `this` of an enclosing class. The type of a capability +must be a capturing type with a non-empty capture set. We also say that +variables that are capabilities are _tracked_. + +In a sense, every +capability gets its authority from some other, more sweeping capability which it captures. The most sweeping capability, from which ultimately all others are derived is written `*`. We call it the _universal capability_. + +Here is an example: +```scala +class FileSystem + +class Logger(fs: {*} FileSystem): + def log(s: String): Unit = ... 
// Write to a log file, using `fs` + +def test(fs: {*} FileSystem) = + val l: {fs} Logger = Logger(fs) + l.log("hello world!") + val xs: {l} LazyList[Int] = + LazyList.from(1) + .map { i => + l.log(s"computing elem # $i") + i * i + } + xs +``` +Here, the `test` method takes a `FileSystem` as a parameter. `fs` is a capability since its type has a non-empty capture set. The capability is passed to the `Logger` constructor +and retained as a field in class `Logger`. Hence, the local variable `l` has type +`{fs} Logger`: it is a `Logger` which retains the `fs` capability. + +The second variable defined in `test` is `xs`, a lazy list that is obtained from +`LazyList.from(1)` by logging and mapping consecutive numbers. Since the list is lazy, +it needs to retain the reference to the logger `l` for its computations. Hence, the +type of the list is `{l} LazyList[Int]`. On the other hand, since `xs` only logs but does +not do other file operations, it retains the `fs` capability only indirectly. That's why +`fs` does not show up in the capture set of `xs`. + +Capturing types come with a subtype relation where types with "smaller" capture sets are subtypes of types with larger sets (the _subcapturing_ relation is defined in more detail below). If a type `T` does not have a capture set, it is called _pure_, and is a subtype of +any capturing type that adds a capture set to `T`. + +## Function Types + +The usual function type `A => B` now stands for a function that can capture arbitrary capabilities. We call such functions +_impure_. By contrast, the new single arrow function type `A -> B` stands for a function that cannot capture any capabilities, or otherwise said, is _pure_. One can add a capture set in front of an otherwise pure function. +For instance, `{c, d} A -> B` would be a function that can capture capabilities `c` and `d`, but no others. + +The impure function type `A => B` is treated as an alias for `{*} A -> B`. 
That is, impure functions are functions that can capture anything. + +Function types and captures both associate to the right, so +```scala +{c} A -> {d} B -> C +``` +is the same as +```scala +{c} (A -> {d} (B -> C)) +``` +Contrast with +```scala +({c} A) -> ({d} B) -> C +``` +which is a curried pure function over argument types that can capture `c` and `d`, respectively. + +Analogous conventions apply to context function types. `A ?=> B` is an impure context function, with `A ?-> B` as its pure complement. + +**Note 1:** The identifiers `->` and `?->` are now treated as soft keywords when used as infix type operators. They are +still available as regular identifiers for terms. For instance, the mapping syntax `Map("x" -> 1, "y" -> 2)` is still supported since it only applies to terms. + +**Note 2:** The distinctions between pure vs impure function types do not apply to methods. In fact, since methods are not values they never capture anything directly. References to +capabilities in a method are instead counted in the capture set of the enclosing object. + +## By-Name Parameter Types + +A convention analogous to function types also extends to by-name parameters. In +```scala +def f(x: => Int): Int +``` +the actual argument can refer to arbitrary capabilities. So the following would be OK: +```scala +f(if p(y) then throw Ex() else 1) +``` +On the other hand, if `f` was defined like this +```scala +def f(x: -> Int): Int +``` +the actual argument to `f` could not refer to any capabilities, so the call above would be rejected. +One can also allow specific capabilities like this: +```scala +def f(x: {c}-> Int): Int +``` +Here, the actual argument to `f` is allowed to use the `c` capability but no others. + +**Note**: It is strongly recommended to write the capability set and the arrow `->` without intervening spaces, +as otherwise the notation would look confusingly like a function type. + +## Subtyping and Subcapturing + +Capturing influences subtyping. 
As usual we write `T₁ <: T₂` to express that the type +`T₁` is a subtype of the type `T₂`, or equivalently, that `T₁` conforms to `T₂`. An +analogous _subcapturing_ relation applies to capture sets. If `C₁` and `C₂` are capture sets, we write `C₁ <: C₂` to express that `C₁` _is covered by_ `C₂`, or, swapping the operands, that `C₂` _covers_ `C₁`. + +Subtyping extends as follows to capturing types: + + - Pure types are subtypes of capturing types. That is, `T <: C T`, for any type `T`, capturing set `C`. + - For capturing types, smaller capturing sets produce subtypes: `C₁ T₁ <: C₂ T₂` if + `C₁ <: C₂` and `T₁ <: T₂`. + +A subcapturing relation `C₁ <: C₂` holds if `C₂` _accounts for_ every element `c` in `C₁`. This means one of the following three conditions must be true: + + - `c ∈ C₂`, + - `c` refers to a parameter of some class `Cls` and `C₂` contains `Cls.this`, + - `c`'s type has capturing set `C` and `C₂` accounts for every element of `C` (that is, `C <: C₂`). + + +**Example 1.** Given +```scala +fs: {*} FileSystem +ct: {*} CanThrow[Exception] +l : {fs} Logger +``` +we have +``` +{l} <: {fs} <: {*} +{fs} <: {fs, ct} <: {*} +{ct} <: {fs, ct} <: {*} +``` +The set consisting of the root capability `{*}` covers every other capture set. This is +a consequence of the fact that, ultimately, every capability is created from `*`. + +**Example 2.** Consider again the FileSystem/Logger example from before. `LazyList[Int]` is a proper subtype of `{l} LazyList[Int]`. So if the `test` method in that example +was declared with a result type `LazyList[Int]`, we'd get a type error. Here is the error message: +``` +11 |def test(using fs: {*} FileSystem): LazyList[Int] = { + | ^ + | Found: {fs} LazyList[Int] + | Required: LazyList[Int] +``` +Why does it say `{fs} LazyList[Int]` and not `{l} LazyList[Int]`, which is, after all, the type of the returned value `xs`? 
The reason is that `l` is a local variable in the body of `test`, so it cannot be referred to in a type outside that body. What happens instead is that the type is _widened_ to the smallest supertype that does not mention `l`. Since `l` has capture set `fs`, we have that `{fs}` covers `{l}`, and `{fs}` is acceptable in a result type of `test`, so `{fs}` is the result of that widening. +This widening is called _avoidance_; it is not specific to capture checking but applies to all variable references in Scala types. + +## Capability Classes + +Classes like `CanThrow` or `FileSystem` have the property that their values are always intended to be capabilities. We can make this intention explicit and save boilerplate by declaring these classes with a `@capability` annotation. + +The capture set of a capability class type is always `{*}`. This means we could equivalently express the `FileSystem` and `Logger` classes as follows: +```scala +import annotation.capability + +@capability class FileSystem + +class Logger(using FileSystem): + def log(s: String): Unit = ??? + +def test(using fs: FileSystem) = + val l: {fs} Logger = Logger() + ... +``` +In this version, `FileSystem` is a capability class, which means that the `{*}` capture set is implied on the parameters of `Logger` and `test`. Writing the capture set explicitly produces a warning: +```scala +class Logger(using {*} FileSystem): + ^^^^^^^^^^^^^^ + redundant capture: FileSystem already accounts for * +``` +Another, unrelated change in the version of the last example here is that the `FileSystem` capability is now passed as an implicit parameter. It is quite natural to model capabilities with implicit parameters since it greatly reduces the wiring overhead once multiple capabilities are in play. + +## Capture Checking of Closures + +If a closure refers to capabilities in its body, it captures these capabilities in its type. 
For instance, consider: +```scala +def test(fs: FileSystem): {fs} String -> Unit = + (x: String) => Logger(fs).log(x) +``` +Here, the body of `test` is a lambda that refers to the capability `fs`, which means that `fs` is retained in the lambda. +Consequently, the type of the lambda is `{fs} String -> Unit`. + +**Note:** Function values are always written with `=>` (or `?=>` for context functions). There is no syntactic +distinction for pure _vs_ impure function values. The distinction is only made in their types. + +A closure also captures all capabilities that are captured by the functions +it calls. For instance, in +```scala +def test(fs: FileSystem) = + def f() = g() + def g() = (x: String) => Logger(fs).log(x) + f +``` +the result of `test` has type `{fs} String -> Unit` even though function `f` itself does not refer to `fs`. + +## Capture Checking of Classes + +The principles for capture checking closures also apply to classes. For instance, consider: +```scala +class Logger(using fs: FileSystem): + def log(s: String): Unit = ... summon[FileSystem] ... + +def test(xfs: FileSystem): {xfs} Logger = + Logger(xfs) +``` +Here, class `Logger` retains the capability `fs` as a (private) field. Hence, the result +of `test` is of type `{xfs} Logger` + +Sometimes, a tracked capability is meant to be used only in the constructor of a class, but +is not intended to be retained as a field. This fact can be communicated to the capture +checker by declaring the parameter as `@constructorOnly`. Example: +```scala +import annotation.constructorOnly + +class NullLogger(using @constructorOnly fs: FileSystem): + ... +def test2(using fs: FileSystem): NullLogger = NullLogger() // OK +``` + +The captured references of a class include _local capabilities_ and _argument capabilities_. Local capabilities are capabilities defined outside the class and referenced from its body. Argument capabilities are passed as parameters to the primary constructor of the class. 
Local capabilities are inherited: +the local capabilities of a superclass are also local capabilities of its subclasses. Example: + +```scala +@capability class Cap + +def test(a: Cap, b: Cap, c: Cap) = + class Super(y: Cap): + def f = a + class Sub(x: Cap) extends Super(x) + def g = b + Sub(c) +``` +Here class `Super` has local capability `a`, which gets inherited by class +`Sub` and is combined with `Sub`'s own local capability `b`. Class `Sub` also has an argument capability corresponding to its parameter `x`. This capability gets instantiated to `c` in the final constructor call `Sub(c)`. Hence, +the capture set of that call is `{a, b, c}`. + +The capture set of the type of `this` of a class is inferred by the capture checker, unless the type is explicitly declared with a self type annotation like this one: +```scala +class C: + self: {a, b} D => ... +``` +The inference observes the following constraints: + + - The type of `this` of a class `C` includes all captured references of `C`. + - The type of `this` of a class `C` is a subtype of the type of `this` + of each parent class of `C`. + - The type of `this` must observe all constraints where `this` is used. + +For instance, in +```scala +@capability class Cap +def test(c: Cap) = + class A: + val x: A = this + def f = println(c) // error +``` +we know that the type of `this` must be pure, since `this` is the right hand side of a `val` with type `A`. However, in the last line we find that the capture set of the class, and with it the capture set of `this`, would include `c`. 
This leads to a contradiction, and hence to a checking error: +``` +16 | def f = println(c) // error + | ^ + |(c : Cap) cannot be referenced here; it is not included in the allowed capture set {} +``` + +## Capture Tunnelling + +Consider the following simple definition of a `Pair` class: +```scala +class Pair[+A, +B](x: A, y: B): + def fst: A = x + def snd: B = y +``` +What happens if `Pair` is instantiated like this (assuming `ct` and `fs` are two capabilities in scope)? +```scala +def x: {ct} Int -> String +def y: {fs} Logger +def p = Pair(x, y) +``` +The last line will be typed as follows: +```scala +def p: Pair[{ct} Int -> String, {fs} Logger] = Pair(x, y) +``` +This might seem surprising. The `Pair(x, y)` value does capture capabilities `ct` and `fs`. Why don't they show up in its type at the outside? + +The answer is capture tunnelling. Once a type variable is instantiated to a capturing type, the +capture is not propagated beyond this point. On the other hand, if the type variable is instantiated +again on access, the capture information "pops out" again. For instance, even though `p` is technically pure because its capture set is empty, writing `p.fst` would record a reference to the captured capability `ct`. So if this access was put in a closure, the capability would again form part of the outer capture set. E.g. +```scala +() => p.fst : {ct} () -> {ct} Int -> String +``` +In other words, references to capabilities "tunnel through" in generic instantiations from creation to access; they do not affect the capture set of the enclosing generic data constructor applications. +This principle plays an important part in making capture checking concise and practical. + +## Escape Checking + +The universal capability `*` should be conceptually available only as a parameter to the main program. Indeed, if it was available everywhere, capability checking would be undermined since one could mint new capabilities +at will. 
In line with this reasoning, some capture sets are restricted so that +they are not allowed to contain the universal capability. + +Specifically, if a capturing type is an instance of a type variable, that capturing type +is not allowed to carry the universal capability `{*}`. There's a connection to tunnelling here. +The capture set of a type has to be present in the environment when a type is instantiated from +a type variable. But `*` is not itself available as a global entity in the environment. Hence, +an error should result. + +We can now reconstruct how this principle produced the error in the introductory example, where +`usingLogFile` was declared like this: +```scala +def usingLogFile[T](op: ({*} FileOutputStream) => T): T = ... +``` +The error message was: +``` + | val later = usingLogFile { f => () => f.write(0) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +``` +This error message was produced by the following logic: + + - The `f` parameter has type `{*} FileOutputStream`, which makes it a capability. + - Therefore, the type of the expression `() => f.write(0)` is `{f} () -> Unit`. + - This makes the type of the whole closure passed to `usingLogFile` the dependent function type + `(f: {*} FileOutputStream) -> {f} () -> Unit`. + - The expected type of the closure is a simple, parametric, impure function type `({*} FileOutputStream) => T`, + for some instantiation of the type variable `T`. + - The smallest supertype of the closure's dependent function type that is a parametric function type is + `({*} FileOutputStream) => {*} () -> Unit` + - Hence, the type variable `T` is instantiated to `* () -> Unit`, which causes the error. + +An analogous restriction applies to the type of a mutable variable. 
+Another way one could try to undermine capture checking would be to +assign a closure with a local capability to a global variable. Maybe +like this: +```scala +var loophole: {*} () -> Unit = () => () +usingLogFile { f => + loophole = () => f.write(0) +} +loophole() +``` +But this will not compile either, since mutable variables cannot have universal capture sets. + +One also needs to prevent returning or assigning a closure with a local capability in an argument of a parametric type. For instance, here is a +slightly more refined attack: +```scala +class Cell[+A](x: A) +val sneaky = usingLogFile { f => Cell(() => f.write(0)) } +sneaky.x() +``` +At the point where the `Cell` is created, the capture set of the argument is `f`, which +is OK. But at the point of use, it is `*` (because `f` is no longer in scope), which causes again an error: +``` + | sneaky.x() + | ^^^^^^^^ + |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +``` + +Looking at object graphs, we observe a monotonicity property: The capture set of an object `x` covers the capture sets of all objects reachable through `x`. This property is reflected in the type system by the following _monotonicity rule_: + + - In a class `C` with a field `f`, the capture set `{this}` covers the capture set `{this.f}` as well as the capture set of any application of `this.f` to pure arguments. + +## Checked Exceptions + +Scala enables checked exceptions through a language import. Here is an example, +taken from the [safer exceptions page](./canthrow.md), and also described in a +[paper](https://infoscience.epfl.ch/record/290885) presented at the + 2021 Scala Symposium. 
+```scala +import language.experimental.saferExceptions + +class LimitExceeded extends Exception + +val limit = 10e+10 +def f(x: Double): Double throws LimitExceeded = + if x < limit then x * x else throw LimitExceeded() +``` +The new `throws` clause expands into an implicit parameter that provides +a `CanThrow` capability. Hence, function `f` could equivalently be written +like this: +```scala +def f(x: Double)(using CanThrow[LimitExceeded]): Double = ... +``` +If the implicit parameter is missing, an error is reported. For instance, the function definition +```scala +def g(x: Double): Double = + if x < limit then x * x else throw LimitExceeded() +``` +is rejected with this error message: +``` + | if x < limit then x * x else throw LimitExceeded() + | ^^^^^^^^^^^^^^^^^^^^^ + |The capability to throw exception LimitExceeded is missing. + |The capability can be provided by one of the following: + | - Adding a using clause `(using CanThrow[LimitExceeded])` to the definition of the enclosing method + | - Adding `throws LimitExceeded` clause after the result type of the enclosing method + | - Wrapping this piece of code with a `try` block that catches LimitExceeded +``` +`CanThrow` capabilities are required by `throw` expressions and are created +by `try` expressions. For instance, the expression +```scala +try xs.map(f).sum +catch case ex: LimitExceeded => -1 +``` +would be expanded by the compiler to something like the following: +```scala +try + erased given ctl: CanThrow[LimitExceeded] = compiletime.erasedValue + xs.map(f).sum +catch case ex: LimitExceeded => -1 +``` +(The `ctl` capability is only used for type checking but need not show up in the generated code, so it can be declared as +erased.) + +As with other capability based schemes, one needs to guard against capabilities +that are captured in results. 
For instance, here is a problematic use case: +```scala +def escaped(xs: Double*): (() => Double) throws LimitExceeded = + try () => xs.map(f).sum + catch case ex: LimitExceeded => () => -1 +val crasher = escaped(1, 2, 10e+11) +crasher() +``` +This code needs to be rejected since otherwise the call to `crasher()` would cause +an unhandled `LimitExceeded` exception to be thrown. + +Under `-Ycc`, the code is indeed rejected +``` +14 | try () => xs.map(f).sum + | ^ + |The expression's type {*} () -> Double is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +15 | catch case ex: LimitExceeded => () => -1 +``` +To integrate exception and capture checking, only two changes are needed: + + - `CanThrow` is declared as a `@capability` class, so all references to `CanThrow` instances are tracked. + - Escape checking is extended to `try` expressions. The result type of a `try` is not allowed to + capture the universal capability. + +## A Larger Example + +As a larger example, we present an implementation of lazy lists and some use cases. For simplicity, +our lists are lazy only in their tail part. This corresponds to what the Scala-2 type `Stream` did, whereas Scala 3's `LazyList` type computes strictly less since it is also lazy in the first argument. + +Here is the base trait `LzyList` for our version of lazy lists: +```scala +trait LzyList[+A]: + def isEmpty: Boolean + def head: A + def tail: {this} LzyList[A] +``` +Note that `tail` carries a capture annotation. It says that the tail of a lazy list can +potentially capture the same references as the lazy list as a whole. + +The empty case of a `LzyList` is written as usual: +```scala +object LzyNil extends LzyList[Nothing]: + def isEmpty = true + def head = ??? + def tail = ??? 
+``` +Here is a formulation of the class for lazy cons nodes: +```scala +import scala.compiletime.uninitialized + +final class LzyCons[+A](hd: A, tl: () => {*} LzyList[A]) extends LzyList[A]: + private var forced = false + private var cache: {this} LzyList[A] = uninitialized + private def force = + if !forced then { cache = tl(); forced = true } + cache + + def isEmpty = false + def head = hd + def tail: {this} LzyList[A] = force +end LzyCons +``` +The `LzyCons` class takes two parameters: A head `hd` and a tail `tl`, which is a function +returning a `LzyList`. Both the function and its result can capture arbitrary capabilities. +The result of applying the function is memoized after the first dereference of `tail` in +the private mutable field `cache`. Note that the typing of the assignment `cache = tl()` relies on the monotonicity rule for `{this}` capture sets. + +Here is an extension method to define an infix cons operator `#:` for lazy lists. It is analogous +to `::` but instead of a strict list it produces a lazy list without evaluating its right operand. +```scala +extension [A](x: A) + def #:(xs1: => {*} LzyList[A]): {xs1} LzyList[A] = + LzyCons(x, () => xs1) +``` +Note that `#:` takes an impure call-by-name parameter `xs1` as its right argument. The result +of `#:` is a lazy list that captures that argument. + +As an example usage of `#:`, here is a method `tabulate` that creates a lazy list +of given length with a generator function `gen`. The generator function is allowed +to have side effects. 
+```scala +def tabulate[A](n: Int)(gen: Int => A) = + def recur(i: Int): {gen} LzyList[A] = + if i == n then LzyNil + else gen(i) #: recur(i + 1) + recur(0) +``` +Here is a use of `tabulate`: +```scala +class LimitExceeded extends Exception +def squares(n: Int)(using ct: CanThrow[LimitExceeded]) = + tabulate(10) { i => + if i > 9 then throw LimitExceeded() + i * i + } +``` +The inferred result type of `squares` is `{ct} LzyList[Int]`, i.e it is a lazy list of +`Int`s that can throw the `LimitExceeded` exception when it is elaborated by calling `tail` +one or more times. + +Here are some further extension methods for mapping, filtering, and concatenating lazy lists: +```scala +extension [A](xs: {*} LzyList[A]) + def map[B](f: A => B): {xs, f} LzyList[B] = + if xs.isEmpty then LzyNil + else f(xs.head) #: xs.tail.map(f) + + def filter(p: A => Boolean): {xs, p} LzyList[A] = + if xs.isEmpty then LzyNil + else if p(xs.head) then xs.head #: xs.tail.filter(p) + else xs.tail.filter(p) + + def concat(ys: {*} LzyList[A]): {xs, ys} LzyList[A] = + if xs.isEmpty then ys + else xs.head #: xs.tail.concat(ys) + + def drop(n: Int): {xs} LzyList[A] = + if n == 0 then xs else xs.tail.drop(n - 1) +``` +Their capture annotations are all as one would expect: + + - Mapping a lazy list produces a lazy list that captures the original list as well + as the (possibly impure) mapping function. + - Filtering a lazy list produces a lazy list that captures the original list as well + as the (possibly impure) filtering predicate. + - Concatenating two lazy lists produces a lazy list that captures both arguments. + - Dropping elements from a lazy list gives a safe approximation where the original list is captured in the result. In fact, it's only some suffix of the list that is retained at run time, but our modelling identifies lazy lists and their suffixes, so this additional knowledge would not be useful. + +Of course the function passed to `map` or `filter` could also be pure. 
After all, `A -> B` is a subtype of `{*} A -> B` which is the same as `A => B`. In that case, the pure function +argument will _not_ show up in the result type of `map` or `filter`. For instance: +```scala +val xs = squares(10) +val ys: {xs} LzyList[Int] = xs.map(_ + 1) +``` +The type of the mapped list `ys` has only `xs` in its capture set. The actual function +argument does not show up since it is pure. Likewise, if the lazy list +`xs` was pure, it would not show up in any of the method results. +This demonstrates that capability-based +effect systems with capture checking are naturally _effect polymorphic_. + +This concludes our example. It's worth mentioning that an equivalent program defining and using standard, strict lists would require no capture annotations whatsoever. It would compile exactly as written now in standard Scala 3, yet one gets the capture checking for free. Essentially, `=>` already means "can capture anything" and since in a strict list side effecting operations are not retained in the result, there are no additional captures to record. A strict list could of course capture side-effecting closures in its elements but then tunnelling applies, since +these elements are represented by a type variable. This means we don't need to annotate anything there either. + +Another possibility would be a variant of lazy lists that requires all functions passed to `map`, `filter` and other operations like it to be pure. E.g. `map` on such a list would be defined like this: +```scala +extension [A](xs: LzyList[A]) + def map[B](f: A -> B): LzyList[B] = ... +``` +That variant would not require any capture annotations either. + +To summarize, there are two "sweet spots" of data structure design: strict lists in +side-effecting or resource-aware code and lazy lists in purely functional code. +Both are already correctly capture-typed without requiring any explicit annotations. 
Capture annotations only come into play where the semantics gets more complicated because we deal with delayed effects such as in impure lazy lists or side-effecting iterators over strict lists. This property is probably one of the greatest plus points of our approach to capture checking compared to previous techniques which tend to be more noisy. + +## Function Type Shorthands + +TBD + +## Compilation Options + +The following options are relevant for capture checking. + + - **-Ycc** Enables capture checking. + - **-Xprint:cc** Prints the program with capturing types as inferred by capture checking. + - **-Ycc-debug** Gives more detailed, implementation-oriented information about capture checking, as described in the next section. + + The implementation supporting capture checking with these options is currently in branch `cc-experiment` on dotty.epfl.ch. + +## Capture Checking Internals + +The capture checker is architected as a propagation constraint solver, which runs as a separate phase after type-checking and some initial transformations. + +Constraint variables stand for unknown capture sets. A constraint variable is introduced + + - for every part of a previously inferred type, + - for the accessed references of every method, class, anonymous function, or by-name argument, + - for the parameters passed in a class constructor call. + +Capture sets in explicitly written types are treated as constants (before capture checking, such sets are simply ignored). + +The capture checker essentially rechecks the program with the usual typing rules. Every time a subtype requirement between capturing types is checked, this translates to a subcapturing test on capture sets. If the two sets are constant, this is simply a yes/no question, where a no will produce an error message. + +If the lower set `C₁` of a comparison `C₁ <: C₂` is a variable, the set `C₂` is recorded +as a _superset_ of `C₁`. 
If the upper set `C₂` is a variable, the elements of `C₁` are _propagated_ to `C₂`. Propagation of an element `x` to a set `C` means that `x` is included as an element in `C`, and it is also propagated +to all known supersets of `C`. If such a superset is a constant, it is checked that `x` is included in it. If that's not the case, the original comparison `C₁ <: C₂` has no solution and an error is reported. + +The type checker also performs various maps on types, for instance when substituting actual argument types for formal parameter types in dependent functions, or mapping +member types with "as-seen-from" in a selection. Maps keep track of the variance +of positions in a type. The variance is initially covariant, it flips to +contravariant in function parameter positions, and can be either covariant, +contravariant, or nonvariant in type arguments, depending on the variance of +the type parameter. + +When capture checking, the same maps are also performed on capture sets. If a capture set is a constant, its elements (which are capabilities) are mapped as regular types. If the result of such a map is not a capability, the result is approximated according to the variance of the type. A covariant approximation replaces a type by its capture set. +A contravariant approximation replaces it with the empty capture set. A nonvariant +approximation replaces the enclosing capturing type with a range of possible types +that gets propagated and resolved further out. + +When a mapping `m` is performed on a capture set variable `C`, a new variable `Cm` is created that contains the mapped elements and that is linked with `C`. If `C` subsequently acquires further elements through propagation, these are also propagated to `Cm` after being transformed by the `m` mapping. `Cm` also gets the same supersets as `C`, mapped again using `m`. + +One interesting aspect of the capture checker concerns the implementation of capture tunnelling. 
The [foundational theory](https://infoscience.epfl.ch/record/290885) on which capture checking is based makes tunnelling explicit through so-called _box_ and +_unbox_ operations. Boxing hides a capture set and unboxing recovers it. The capture checker inserts virtual box and unbox operations based on actual and expected types similar to the way the type checker inserts implicit conversions. When capture set variables are first introduced, any capture set in a capturing type that is an instance of a type parameter instance is marked as "boxed". A boxing operation is +inserted if the expected type of an expression is a capturing type with +a boxed capture set variable. The effect of the insertion is that any references +to capabilities in the boxed expression are forgotten, which means that capture +propagation is stopped. Dually, if the actual type of an expression has +a boxed variable as capture set, an unbox operation is inserted, which adds all +elements of the capture set to the environment. + +Boxing and unboxing has no runtime effect, so the insertion of these operations is only simulated; the only visible effect is the retraction and insertion +of variables in the capture sets representing the environment of the currently checked expression. + +The `-Ycc-debug` option provides some insight into the workings of the capture checker. +When it is turned on, boxed sets are marked explicitly and capture set variables are printed with an ID and some information about their provenance. For instance, the string `{f, xs}33M5V` indicates a capture set +variable that is known to hold elements `f` and `xs`. The variable's ID is `33`. The `M` +indicates that the variable was created through a mapping from a variable with ID `5`. The latter is a regular variable, as indicated + by `V`. + +Generally, the string following the capture set consists of alternating numbers and letters where each number gives a variable ID and each letter gives the provenance of the variable. 
Possible letters are + + - `V` : a regular variable, + - `M` : a variable resulting from a _mapping_ of the variable indicated by the string to the right, + - `B` : similar to `M` but where the mapping is a _bijection_, + - `F` : a variable resulting from _filtering_ the elements of the variable indicated by the string to the right, + - `I` : a variable resulting from an _intersection_ of two capture sets, + - `D` : a variable resulting from the set _difference_ of two capture sets. + +At the end of a compilation run, `-Ycc-debug` will print all variable dependencies of variables referred to in previous output. Here is an example: +``` +Capture set dependencies: + {}2V :: + {}3V :: + {}4V :: + {f, xs}5V :: {f, xs}31M5V, {f, xs}32M5V + {f, xs}31M5V :: {xs, f} + {f, xs}32M5V :: +``` +This section lists all variables that appeared in previous diagnostics and their dependencies, recursively. For instance, we learn that + + - variables 2, 3, 4 are empty and have no dependencies, + - variable `5` has two dependencies: variables `31` and `32` which both result from mapping variable `5`, + - variable `31` has a constant fixed superset `{xs, f}` + - variable `32` has no dependencies. + diff --git a/docs/_spec/TODOreference/experimental/erased-defs-spec.md b/docs/_spec/TODOreference/experimental/erased-defs-spec.md new file mode 100644 index 000000000000..5395a8468399 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/erased-defs-spec.md @@ -0,0 +1,64 @@ +--- +layout: doc-page +title: "Erased Definitions - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/erased-defs-spec.html +--- + +TODO: complete +## Rules + +1. `erased` is a soft modifier. It can appear: + * At the start of a parameter block of a method, function or class + * In a method definition + * In a `val` definition (but not `lazy val` or `var`) + * In a `class` or `trait` definition + + ```scala + erased val x = ... + erased def f = ... + + def g(erased x: Int) = ... 
+ + (erased x: Int) => ... + def h(x: (erased Int) => Int) = ... + + class K(erased x: Int) { ... } + erased class E {} + ``` + + +2. A reference to an `erased` val or def can only be used + * Inside the expression of argument to an `erased` parameter + * Inside the body of an `erased` `val` or `def` + + +3. Functions + * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R` + * `(given erased x1: T1, x2: T2, ..., xN: TN) => y: (given erased T1, T2, ..., TN) => R` + * `(given erased T1) => R <:< erased T1 => R` + * `(given erased T1, T2) => R <:< (erased T1, T2) => R` + * ... + + Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`) + + +4. Eta expansion + + if `def f(erased x: T): U` then `f: (erased T) => U`. + + +5. Erasure semantics + * All `erased` parameters are removed from the function + * All argument to `erased` parameters are not passed to the function + * All `erased` definitions are removed + * All `(erased T1, T2, ..., TN) => R` and `(given erased T1, T2, ..., TN) => R` become `() => R` + + +6. Overloading + + Method with `erased` parameters will follow the normal overloading constraints after erasure. + + +7. Overriding + * Member definitions overriding each other must both be `erased` or not be `erased` + * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa diff --git a/docs/_spec/TODOreference/experimental/erased-defs.md b/docs/_spec/TODOreference/experimental/erased-defs.md new file mode 100644 index 000000000000..28455f26cdc0 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/erased-defs.md @@ -0,0 +1,231 @@ +--- +layout: doc-page +title: "Erased Definitions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/erased-defs.html +--- + +`erased` is a modifier that expresses that some definition or expression is erased by the compiler instead of being represented in the compiled output. 
It is not yet part of the Scala language standard. To enable `erased`, turn on the language feature +[`experimental.erasedDefinitions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$erasedDefinitions$.html). This can be done with a language import +```scala +import scala.language.experimental.erasedDefinitions +``` +or by setting the command line option `-language:experimental.erasedDefinitions`. +Erased definitions must be in an experimental scope (see [Experimental definitions](../other-new-features/experimental-defs.md)). + +## Why erased terms? + +Let's describe the motivation behind erased terms with an example. In the +following we show a simple state machine which can be in a state `On` or `Off`. +The machine can change state from `Off` to `On` with `turnedOn` only if it is +currently `Off`. This last constraint is captured with the `IsOff[S]` contextual +evidence which only exists for `IsOff[Off]`. For example, not allowing calling +`turnedOn` on in an `On` state as we would require an evidence of type +`IsOff[On]` that will not be found. + +```scala +sealed trait State +final class On extends State +final class Off extends State + +@implicitNotFound("State must be Off") +class IsOff[S <: State] +object IsOff: + given isOff: IsOff[Off] = new IsOff[Off] + +class Machine[S <: State]: + def turnedOn(using IsOff[S]): Machine[On] = new Machine[On] + +val m = new Machine[Off] +m.turnedOn +m.turnedOn.turnedOn // ERROR +// ^ +// State must be Off +``` + +Note that in the code above the actual context arguments for `IsOff` are never +used at runtime; they serve only to establish the right constraints at compile +time. As these terms are never used at runtime there is not real need to have +them around, but they still need to be present in some form in the generated +code to be able to do separate compilation and retain binary compatibility. 
We +introduce _erased terms_ to overcome this limitation: we are able to enforce the +right constrains on terms at compile time. These terms have no run time +semantics and they are completely erased. + +## How to define erased terms? + +Parameters of methods and functions can be declared as erased, placing `erased` +in front of a parameter list (like `given`). + +```scala +def methodWithErasedEv(erased ev: Ev): Int = 42 + +val lambdaWithErasedEv: erased Ev => Int = + (erased ev: Ev) => 42 +``` + +`erased` parameters will not be usable for computations, though they can be used +as arguments to other `erased` parameters. + +```scala +def methodWithErasedInt1(erased i: Int): Int = + i + 42 // ERROR: can not use i + +def methodWithErasedInt2(erased i: Int): Int = + methodWithErasedInt1(i) // OK +``` + +Not only parameters can be marked as erased, `val` and `def` can also be marked +with `erased`. These will also only be usable as arguments to `erased` +parameters. + +```scala +erased val erasedEvidence: Ev = ... +methodWithErasedEv(erasedEvidence) +``` + +## What happens with erased values at runtime? + +As `erased` are guaranteed not to be used in computations, they can and will be +erased. + +```scala +// becomes def methodWithErasedEv(): Int at runtime +def methodWithErasedEv(erased ev: Ev): Int = ... + +def evidence1: Ev = ... +erased def erasedEvidence2: Ev = ... // does not exist at runtime +erased val erasedEvidence3: Ev = ... // does not exist at runtime + +// evidence1 is not evaluated and no value is passed to methodWithErasedEv +methodWithErasedEv(evidence1) +``` + +## State machine with erased evidence example + +The following example is an extended implementation of a simple state machine +which can be in a state `On` or `Off`. The machine can change state from `Off` +to `On` with `turnedOn` only if it is currently `Off`, conversely from `On` to +`Off` with `turnedOff` only if it is currently `On`. 
These last constraints are
`erasedValue` is implemented with `erased`, so the state machine above +can be encoded as follows: + +```scala +import scala.compiletime.* + +sealed trait State +final class On extends State +final class Off extends State + +class Machine[S <: State]: + transparent inline def turnOn(): Machine[On] = + inline erasedValue[S] match + case _: Off => new Machine[On] + case _: On => error("Turning on an already turned on machine") + + transparent inline def turnOff(): Machine[Off] = + inline erasedValue[S] match + case _: On => new Machine[Off] + case _: Off => error("Turning off an already turned off machine") + +object Machine: + def newMachine(): Machine[Off] = + println("newMachine") + new Machine[Off] +end Machine + +@main def test = + val m = Machine.newMachine() + m.turnOn() + m.turnOn().turnOff() + m.turnOn().turnOn() // error: Turning on an already turned on machine +``` + +## Erased Classes + +`erased` can also be used as a modifier for a class. An erased class is intended to be used only in erased definitions. If the type of a val definition or parameter is +a (possibly aliased, refined, or instantiated) erased class, the definition is assumed to be `erased` itself. Likewise, a method with an erased class return type is assumed to be `erased` itself. Since given instances expand to vals and defs, they are also assumed to be erased if the type they produce is an erased class. Finally +function types with erased classes as arguments turn into erased function types. + +Example: +```scala +erased class CanRead + +val x: CanRead = ... // `x` is turned into an erased val +val y: CanRead => Int = ... // the function is turned into an erased function +def f(x: CanRead) = ... // `f` takes an erased parameter +def g(): CanRead = ... // `g` is turned into an erased def +given CanRead = ... // the anonymous given is assumed to be erased +``` +The code above expands to +```scala +erased class CanRead + +erased val x: CanRead = ... +val y: (erased CanRead) => Int = ... 
+def f(erased x: CanRead) = ... +erased def g(): CanRead = ... +erased given CanRead = ... +``` +After erasure, it is checked that no references to values of erased classes remain and that no instances of erased classes are created. So the following would be an error: +```scala +val err: Any = CanRead() // error: illegal reference to erased class CanRead +``` +Here, the type of `err` is `Any`, so `err` is not considered erased. Yet its initializing value is a reference to the erased class `CanRead`. + +[More Details](./erased-defs-spec.md) diff --git a/docs/_spec/TODOreference/experimental/explicit-nulls.md b/docs/_spec/TODOreference/experimental/explicit-nulls.md new file mode 100644 index 000000000000..b3fa53429cfe --- /dev/null +++ b/docs/_spec/TODOreference/experimental/explicit-nulls.md @@ -0,0 +1,543 @@ +--- +layout: doc-page +title: "Explicit Nulls" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/explicit-nulls.html +--- + +Explicit nulls is an opt-in feature that modifies the Scala type system, which makes reference types +(anything that extends [`AnyRef`](https://scala-lang.org/api/3.x/scala/AnyRef.html)) _non-nullable_. + +This means the following code will no longer typecheck: + +```scala +val x: String = null // error: found `Null`, but required `String` +``` + +Instead, to mark a type as nullable we use a [union type](../new-types/union-types.md) + +```scala +val x: String | Null = null // ok +``` + +A nullable type could have null value during runtime; hence, it is not safe to select a member without checking its nullity. + +```scala +x.trim // error: trim is not member of String | Null +``` + +Explicit nulls are enabled via a `-Yexplicit-nulls` flag. + +Read on for details. + +## New Type Hierarchy + +Originally, `Null` is a subtype of all reference types. 
+ +!["Original Type Hierarchy"](images/explicit-nulls/scalaHierarchyWithMatchable.png) + +When explicit nulls is enabled, the type hierarchy changes so that `Null` is only +a subtype of `Any` and `Matchable`, as opposed to every reference type, +which means `null` is no longer a value of `AnyRef` and its subtypes. + +This is the new type hierarchy: + +!["Type Hierarchy for Explicit Nulls"](images/explicit-nulls/scalaHierarchyWithMatchableAndSafeNull.png) + +After erasure, `Null` remains a subtype of all reference types (as forced by the JVM). + +## Working with `Null` + +To make working with nullable values easier, we propose adding a few utilities to the standard library. +So far, we have found the following useful: + +- An extension method `.nn` to "cast away" nullability + + ```scala + extension [T](x: T | Null) + inline def nn: T = + assert(x != null) + x.asInstanceOf[T] + ``` + + This means that given `x: String | Null`, `x.nn` has type `String`, so we can call all the + usual methods on it. Of course, `x.nn` will throw a NPE if `x` is `null`. + + Don't use `.nn` on mutable variables directly, because it may introduce an unknown type into the type of the variable. + +- An [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) language feature. + + When imported, `T | Null` can be used as `T`, similar to regular Scala (without explicit nulls). + + See [UnsafeNulls](#unsafenulls) section for more details. + +## Unsoundness + +The new type system is unsound with respect to `null`. This means there are still instances where an expression has a non-nullable type like `String`, but its value is actually `null`. + +The unsoundness happens because uninitialized fields in a class start out as `null`: + +```scala +class C: + val f: String = foo(f) + def foo(f2: String): String = f2 + +val c = new C() +// c.f == "field is null" +``` + +The unsoundness above can be caught by the compiler with the option `-Ysafe-init`. 
+More details can be found in [safe initialization](../other-new-features/safe-initialization.md). + +## Equality + +We don't allow the double-equal (`==` and `!=`) and reference (`eq` and `ne`) comparison between +`AnyRef` and `Null` anymore, since a variable with a non-nullable type cannot have `null` as value. +`null` can only be compared with `Null`, nullable union (`T | Null`), or `Any` type. + +For some reason, if we really want to compare `null` with non-null values, we have to provide a type hint (e.g. `: Any`). + +```scala +val x: String = ??? +val y: String | Null = ??? + +x == null // error: Values of types String and Null cannot be compared with == or != +x eq null // error +"hello" == null // error + +y == null // ok +y == x // ok + +(x: String | Null) == null // ok +(x: Any) == null // ok +``` + +## Java Interoperability + +The Scala compiler can load Java classes in two ways: from source or from bytecode. In either case, +when a Java class is loaded, we "patch" the type of its members to reflect that Java types +remain implicitly nullable. + +Specifically, we patch + +- the type of fields + +- the argument type and return type of methods + +We illustrate the rules with following examples: + +- The first two rules are easy: we nullify reference types but not value types. + + ```java + class C { + String s; + int x; + } + ``` + + ==> + + ```scala + class C: + val s: String | Null + val x: Int + ``` + +- We nullify type parameters because in Java a type parameter is always nullable, so the following code compiles. + + ```java + class C { T foo() { return null; } } + ``` + + ==> + + ```scala + class C[T] { def foo(): T | Null } + ``` + + Notice this is rule is sometimes too conservative, as witnessed by + + ```scala + class InScala: + val c: C[Bool] = ??? // C as above + val b: Bool = c.foo() // no longer typechecks, since foo now returns Bool | Null + ``` + +- We can reduce the number of redundant nullable types we need to add. 
Consider + + ```java + class Box { T get(); } + class BoxFactory { Box makeBox(); } + ``` + + ==> + + ```scala + class Box[T] { def get(): T | Null } + class BoxFactory[T] { def makeBox(): Box[T] | Null } + ``` + + Suppose we have a `BoxFactory[String]`. Notice that calling `makeBox()` on it returns a + `Box[String] | Null`, not a `Box[String | Null] | Null`. This seems at first + glance unsound ("What if the box itself has `null` inside?"), but is sound because calling + `get()` on a `Box[String]` returns a `String | Null`. + + Notice that we need to patch _all_ Java-defined classes that transitively appear in the + argument or return type of a field or method accessible from the Scala code being compiled. + Absent crazy reflection magic, we think that all such Java classes _must_ be visible to + the Typer in the first place, so they will be patched. + +- We will append `Null` to the type arguments if the generic class is defined in Scala. + + ```java + class BoxFactory { + Box makeBox(); // Box is Scala-defined + List>> makeCrazyBoxes(); // List is Java-defined + } + ``` + + ==> + + ```scala + class BoxFactory[T]: + def makeBox(): Box[T | Null] | Null + def makeCrazyBoxes(): java.util.List[Box[java.util.List[T] | Null]] | Null + ``` + + In this case, since `Box` is Scala-defined, we will get `Box[T | Null] | Null`. + This is needed because our nullability function is only applied (modularly) to the Java + classes, but not to the Scala ones, so we need a way to tell `Box` that it contains a + nullable value. + + The `List` is Java-defined, so we don't append `Null` to its type argument. But we + still need to nullify its inside. 
+ +- We don't nullify _simple_ literal constant (`final`) fields, since they are known to be non-null + + ```java + class Constants { + final String NAME = "name"; + final int AGE = 0; + final char CHAR = 'a'; + + final String NAME_GENERATED = getNewName(); + } + ``` + + ==> + + ```scala + class Constants: + val NAME: String("name") = "name" + val AGE: Int(0) = 0 + val CHAR: Char('a') = 'a' + + val NAME_GENERATED: String | Null = getNewName() + ``` + +- We don't append `Null` to a field nor to a return type of a method which is annotated with a + `NotNull` annotation. + + ```java + class C { + @NotNull String name; + @NotNull List getNames(String prefix); // List is Java-defined + @NotNull Box getBoxedName(); // Box is Scala-defined + } + ``` + + ==> + + ```scala + class C: + val name: String + def getNames(prefix: String | Null): java.util.List[String] // we still need to nullify the paramter types + def getBoxedName(): Box[String | Null] // we don't append `Null` to the outmost level, but we still need to nullify inside + ``` + + The annotation must be from the list below to be recognized as `NotNull` by the compiler. + Check `Definitions.scala` for an updated list. + + ```scala + // A list of annotations that are commonly used to indicate + // that a field/method argument or return type is not null. + // These annotations are used by the nullification logic in + // JavaNullInterop to improve the precision of type nullification. + // We don't require that any of these annotations be present + // in the class path, but we want to create Symbols for the + // ones that are present, so they can be checked during nullification. 
+  @tu lazy val NotNullAnnots: List[ClassSymbol] = ctx.getClassesIfDefined(
+    "javax.annotation.Nonnull" ::
+    "edu.umd.cs.findbugs.annotations.NonNull" ::
+    "androidx.annotation.NonNull" ::
+    "android.support.annotation.NonNull" ::
+    "android.annotation.NonNull" ::
+    "com.android.annotations.NonNull" ::
+    "org.eclipse.jdt.annotation.NonNull" ::
+    "org.checkerframework.checker.nullness.qual.NonNull" ::
+    "org.checkerframework.checker.nullness.compatqual.NonNullDecl" ::
+    "org.jetbrains.annotations.NotNull" ::
+    "lombok.NonNull" ::
+    "io.reactivex.annotations.NonNull" :: Nil map PreNamedString)
+  ```
+
+### Override check
+
+When we check overriding between Scala classes and Java classes, the rules are relaxed for the [`Null`](https://scala-lang.org/api/3.x/scala/Null.html) type with this feature, in order to help users work with Java libraries.
+
+Suppose we have a Java method `String f(String x)`; we can override this method in Scala in any of the following forms:
+
+```scala
+def f(x: String | Null): String | Null
+
+def f(x: String): String | Null
+
+def f(x: String | Null): String
+
+def f(x: String): String
+```
+
+Note that some of the definitions could cause unsoundness. For example, the return type is not nullable, but a `null` value is actually returned.
+
+## Flow Typing
+
+We added a simple form of flow-sensitive type inference. The idea is that if `p` is a
+stable path or a trackable variable, then we can know that `p` is non-null if it's compared
+with `null`. This information can then be propagated to the `then` and `else` branches
+of an if-statement (among other places).
+
+Example:
+
+```scala
+val s: String | Null = ???
+if s != null then + // s: String + +// s: String | Null + +assert(s != null) +// s: String +``` + +A similar inference can be made for the `else` case if the test is `p == null` + +```scala +if s == null then + // s: String | Null +else + // s: String +``` + +`==` and `!=` is considered a comparison for the purposes of the flow inference. + +### Logical Operators + +We also support logical operators (`&&`, `||`, and `!`): + +```scala +val s: String | Null = ??? +val s2: String | Null = ??? +if s != null && s2 != null then + // s: String + // s2: String + +if s == null || s2 == null then + // s: String | Null + // s2: String | Null +else + // s: String + // s2: String +``` + +### Inside Conditions + +We also support type specialization _within_ the condition, taking into account that `&&` and `||` are short-circuiting: + +```scala +val s: String | Null = ??? + +if s != null && s.length > 0 then // s: String in `s.length > 0` + // s: String + +if s == null || s.length > 0 then // s: String in `s.length > 0` + // s: String | Null +else + // s: String +``` + +### Match Case + +The non-null cases can be detected in match statements. + +```scala +val s: String | Null = ??? + +s match + case _: String => // s: String + case _ => +``` + +### Mutable Variable + +We are able to detect the nullability of some local mutable variables. A simple example is: + +```scala +class C(val x: Int, val next: C | Null) + +var xs: C | Null = C(1, C(2, null)) +// xs is trackable, since all assignments are in the same method +while xs != null do + // xs: C + val xsx: Int = xs.x + val xscpy: C = xs + xs = xscpy // since xscpy is non-null, xs still has type C after this line + // xs: C + xs = xs.next // after this assignment, xs can be null again + // xs: C | Null +``` + +When dealing with local mutable variables, there are two questions: + +1. Whether to track a local mutable variable during flow typing. + We track a local mutable variable if the variable is not assigned in a closure. 
+ For example, in the following code `x` is assigned to by the closure `y`, so we do not + do flow typing on `x`. + + ```scala + var x: String | Null = ??? + def y = + x = null + + if x != null then + // y can be called here, which would break the fact + val a: String = x // error: x is captured and mutated by the closure, not trackable + ``` + +2. Whether to generate and use flow typing on a specific _use_ of a local mutable variable. + We only want to do flow typing on a use that belongs to the same method as the definition + of the local variable. + For example, in the following code, even `x` is not assigned to by a closure, we can only + use flow typing in one of the occurrences (because the other occurrence happens within a + nested closure). + + ```scala + var x: String | Null = ??? + def y = + if x != null then + // not safe to use the fact (x != null) here + // since y can be executed at the same time as the outer block + val _: String = x + if x != null then + val a: String = x // ok to use the fact here + x = null + ``` + +See [more examples](https://github.com/lampepfl/dotty/blob/main/tests/explicit-nulls/neg/flow-varref-in-closure.scala). + +Currently, we are unable to track paths with a mutable variable prefix. +For example, `x.a` if `x` is mutable. + +### Unsupported Idioms + +We don't support: + +- flow facts not related to nullability (`if x == 0 then { // x: 0.type not inferred }`) +- tracking aliasing between non-nullable paths + + ```scala + val s: String | Null = ??? + val s2: String | Null = ??? + if s != null && s == s2 then + // s: String inferred + // s2: String not inferred + ``` + +### UnsafeNulls + +It is difficult to work with many nullable values, we introduce a language feature [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html). +Inside this "unsafe" scope, all `T | Null` values can be used as `T`. 
+
+Users can import [`scala.language.unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) to create such scopes, or use `-language:unsafeNulls` to enable this feature globally (for migration purposes only).
+
+Assume `T` is a reference type (a subtype of `AnyRef`), the following unsafe operation rules are
+applied in this unsafe-nulls scope:
+
+1. the members of `T` can be found on `T | Null`
+
+2. a value with type `T` can be compared with `T | Null` and `Null`
+
+3. suppose `T1` is not a subtype of `T2` using explicit-nulls subtyping (where `Null` is a direct
+subtype of Any), extension methods and implicit conversions designed for `T2` can be used for
+`T1` if `T1` is a subtype of `T2` using regular subtyping rules (where `Null` is a subtype of every
+reference type)
+
+4. suppose `T1` is not a subtype of `T2` using explicit-nulls subtyping, a value with type `T1`
+can be used as `T2` if `T1` is a subtype of `T2` using regular subtyping rules
+
+Additionally, `null` can be used as `AnyRef` (`Object`), which means you can select `.eq` or `.toString` on it.
+
+The program in [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) will have **similar** semantics to regular Scala, but not **equivalent**.
+
+For example, the following code cannot be compiled even using unsafe nulls. Because of the
+Java interoperation, the type of the get method becomes `T | Null`.
+
+```scala
+def head[T](xs: java.util.List[T]): T = xs.get(0) // error
+```
+
+Since the compiler doesn’t know whether `T` is a reference type, it is unable to cast `T | Null`
+to `T`. A `.nn` needs to be inserted manually by the user after `xs.get(0)` to fix the error, which
+strips the `Null` from its type.
+
+The intention of this [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) is to give users a better migration path for explicit nulls.
+
+Projects for Scala 2 or regular Scala 3 can try this by adding `-Yexplicit-nulls -language:unsafeNulls`
+to the compile options. A small number of manual modifications are expected. To migrate to the full
+explicit nulls feature in the future, `-language:unsafeNulls` can be dropped and
+`import scala.language.unsafeNulls` added only when needed.
+
+```scala
+def f(x: String): String = ???
+def nullOf[T >: Null]: T = null
+
+import scala.language.unsafeNulls
+
+val s: String | Null = ???
+val a: String = s // unsafely convert String | Null to String
+
+val b1 = s.trim // call .trim on String | Null unsafely
+val b2 = b1.length
+
+f(s).trim // pass String | Null as an argument of type String unsafely
+
+val c: String = null // Null to String
+
+val d1: Array[String] = ???
+val d2: Array[String | Null] = d1 // unsafely convert Array[String] to Array[String | Null]
+val d3: Array[String] = Array(null) // unsafe
+
+class C[T >: Null <: String] // define a type bound with unsafe conflict bound
+
+val n = nullOf[String] // apply a type bound unsafely
+```
+
+Without the [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html), all these unsafe operations will not be type-checked.
+
+[`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) also works for extension methods and implicit search.
+
+```scala
+import scala.language.unsafeNulls
+
+val x = "hello, world!".split(" ").map(_.length)
+
+given Conversion[String, Array[String]] = _ => ???
+
+val y: String | Null = ???
+val z: Array[String | Null] = y
+```
+
+## Binary Compatibility
+
+Our strategy for binary compatibility with Scala binaries that predate explicit nulls
+and new libraries compiled without `-Yexplicit-nulls` is to leave the types unchanged
+and be compatible but unsound.
+ +[More details](https://dotty.epfl.ch/docs/internals/explicit-nulls.html) diff --git a/docs/_spec/TODOreference/experimental/fewer-braces.md b/docs/_spec/TODOreference/experimental/fewer-braces.md new file mode 100644 index 000000000000..eb454886ad03 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/fewer-braces.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Fewer Braces" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/fewer-braces.html +--- + +The documentation contained in this file is now part of [./indentation.html]. \ No newline at end of file diff --git a/docs/_spec/TODOreference/experimental/main-annotation.md b/docs/_spec/TODOreference/experimental/main-annotation.md new file mode 100644 index 000000000000..0c60e1050b87 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/main-annotation.md @@ -0,0 +1,97 @@ +--- +layout: doc-page +title: "MainAnnotation" +--- + +`MainAnnotation` provides a generic way to define main annotations such as `@main`. + +When a users annotates a method with an annotation that extends `MainAnnotation` a class with a `main` method will be generated. The main method will contain the code needed to parse the command line arguments and run the application. 
+ +```scala +/** Sum all the numbers + * + * @param first Fist number to sum + * @param rest The rest of the numbers to sum + */ +@myMain def sum(first: Int, second: Int = 0, rest: Int*): Int = first + second + rest.sum +``` + +```scala +object foo { + def main(args: Array[String]): Unit = { + val mainAnnot = new myMain() + val info = new Info( + name = "foo.main", + documentation = "Sum all the numbers", + parameters = Seq( + new Parameter("first", "scala.Int", hasDefault=false, isVarargs=false, "Fist number to sum", Seq()), + new Parameter("second", "scala.Int", hasDefault=true, isVarargs=false, "", Seq()), + new Parameter("rest", "scala.Int" , hasDefault=false, isVarargs=true, "The rest of the numbers to sum", Seq()) + ) + ) + val mainArgsOpt = mainAnnot.command(info, args) + if mainArgsOpt.isDefined then + val mainArgs = mainArgsOpt.get + val args0 = mainAnnot.argGetter[Int](info.parameters(0), mainArgs(0), None) // using a parser of Int + val args1 = mainAnnot.argGetter[Int](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) // using a parser of Int + val args2 = mainAnnot.varargGetter[Int](info.parameters(2), mainArgs.drop(2)) // using a parser of Int + mainAnnot.run(() => sum(args0(), args1(), args2()*)) + } +} +``` + +The implementation of the `main` method first instantiates the annotation and then call `command`. +When calling the `command`, the arguments can be checked and preprocessed. +Then it defines a series of argument getters calling `argGetter` for each parameter and `varargGetter` for the last one if it is a varargs. `argGetter` gets an optional lambda that computes the default argument. +Finally, the `run` method is called to run the application. It receives a by-name argument that contains the call the annotated method with the instantiations arguments (using the lambdas from `argGetter`/`varargGetter`). + + +Example of implementation of `myMain` that takes all arguments positionally. 
It used `util.CommandLineParser.FromString` and expects no default arguments. For simplicity, any errors in preprocessing or parsing results in crash. + +```scala +// Parser used to parse command line arguments +import scala.util.CommandLineParser.FromString[T] + +// Result type of the annotated method is Int and arguments are parsed using FromString +@experimental class myMain extends MainAnnotation[FromString, Int]: + import MainAnnotation.{ Info, Parameter } + + def command(info: Info, args: Seq[String]): Option[Seq[String]] = + if args.contains("--help") then + println(info.documentation) + None // do not parse or run the program + else if info.parameters.exists(_.hasDefault) then + println("Default arguments are not supported") + None + else if info.hasVarargs then + val numPlainArgs = info.parameters.length - 1 + if numPlainArgs > args.length then + println("Not enough arguments") + None + else + Some(args) + else + if info.parameters.length > args.length then + println("Not enough arguments") + None + else if info.parameters.length < args.length then + println("Too many arguments") + None + else + Some(args) + + def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using parser: FromString[T]): () => T = + () => parser.fromString(arg) + + def varargGetter[T](param: Parameter, args: Seq[String])(using parser: FromString[T]): () => Seq[T] = + () => args.map(arg => parser.fromString(arg)) + + def run(program: () => Int): Unit = + println("executing program") + + val result = program() + println("result: " + result) + println("executed program") + +end myMain +``` diff --git a/docs/_spec/TODOreference/experimental/named-typeargs-spec.md b/docs/_spec/TODOreference/experimental/named-typeargs-spec.md new file mode 100644 index 000000000000..9e1113bbac86 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/named-typeargs-spec.md @@ -0,0 +1,41 @@ +--- +layout: doc-page +title: "Named Type Arguments - More Details" +nightlyOf: 
https://docs.scala-lang.org/scala3/reference/experimental/named-typeargs-spec.html +--- + +In this section we give more details about the [named type arguments](named-typeargs.md) (*experimental*). + +## Syntax + +The addition to the grammar is: + +``` +SimpleExpr1 ::= ... + | SimpleExpr (TypeArgs | NamedTypeArgs) +NamedTypeArgs ::= ‘[’ NamedTypeArg {‘,’ NamedTypeArg} ‘]’ +NamedTypeArg ::= id ‘=’ Type +``` + +Note in particular that named arguments cannot be passed to type constructors: + +``` scala +class C[T] + +val x: C[T = Int] = // error + new C[T = Int] // error + +class E extends C[T = Int] // error +``` + +## Compatibility considerations + +Named type arguments do not have an impact on binary compatibility, but they +have an impact on source compatibility: if the name of a method type parameter +is changed, any existing named reference to this parameter will break. This +means that the names of method type parameters are now part of the public API +of a library. + +(Unimplemented proposal: to mitigate this, +[`scala.deprecatedName`](https://www.scala-lang.org/api/current/scala/deprecatedName.html) +could be extended to also be applicable on method type parameters.) diff --git a/docs/_spec/TODOreference/experimental/named-typeargs.md b/docs/_spec/TODOreference/experimental/named-typeargs.md new file mode 100644 index 000000000000..4928a40f8a6a --- /dev/null +++ b/docs/_spec/TODOreference/experimental/named-typeargs.md @@ -0,0 +1,34 @@ +--- +layout: doc-page +title: "Named Type Arguments" +redirectFrom: /docs/reference/other-new-features/named-typeargs.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/named-typeargs.html +--- + +**Note:** This feature is implemented in Scala 3, but is not expected to be part of Scala 3.0. + +Type arguments of methods can now be specified by name as well as by position. Example: + +``` scala +def construct[Elem, Coll[_]](xs: Elem*): Coll[Elem] = ??? 
+ +val xs1 = construct[Coll = List, Elem = Int](1, 2, 3) +val xs2 = construct[Coll = List](1, 2, 3) +``` + +Similar to a named value argument `(x = e)`, a named type argument +`[X = T]` instantiates the type parameter `X` to the type `T`. +Named type arguments do not have to be in order (see `xs1` above) and +unspecified arguments are inferred by the compiler (see `xs2` above). +Type arguments must be all named or un-named, mixtures of named and +positional type arguments are not supported. + +## Motivation + +The main benefit of named type arguments is that unlike positional arguments, +you are allowed to omit passing arguments for some parameters, like in the +definition of `xs2` above. A missing type argument is inferred as usual by +local type inference. This is particularly useful in situations where some type +arguments can be easily inferred from others. + +[More details](./named-typeargs-spec.md) diff --git a/docs/_spec/TODOreference/experimental/numeric-literals.md b/docs/_spec/TODOreference/experimental/numeric-literals.md new file mode 100644 index 000000000000..f493ef459265 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/numeric-literals.md @@ -0,0 +1,257 @@ +--- +layout: doc-page +title: "Numeric Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/numeric-literals.html +--- + +**Note**: This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: + +```scala +import scala.language.experimental.genericNumberLiterals +``` + +In Scala 2, numeric literals were confined to the primitive numeric types `Int`, `Long`, `Float`, and `Double`. Scala 3 allows to write numeric literals also for user-defined types. 
Example: + +```scala +val x: Long = -10_000_000_000 +val y: BigInt = 0x123_abc_789_def_345_678_901 +val z: BigDecimal = 110_222_799_799.99 + +(y: BigInt) match + case 123_456_789_012_345_678_901 => +``` + +The syntax of numeric literals is the same as before, except there are no pre-set limits +how large they can be. + +## Meaning of Numeric Literals + +The meaning of a numeric literal is determined as follows: + +- If the literal ends with `l` or `L`, it is a `Long` integer (and must fit in its legal range). +- If the literal ends with `f` or `F`, it is a single precision floating point number of type `Float`. +- If the literal ends with `d` or `D`, it is a double precision floating point number of type `Double`. + +In each of these cases the conversion to a number is exactly as in Scala 2 or in Java. If a numeric literal does _not_ end in one of these suffixes, its meaning is determined by the expected type: + +1. If the expected type is `Int`, `Long`, `Float`, or `Double`, the literal is + treated as a standard literal of that type. +2. If the expected type is a fully defined type `T` that has a given instance of type + [`scala.util.FromDigits[T]`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html), the literal is converted to a value of type `T` by passing it as an argument to + the `fromDigits` method of that instance (more details below). +3. Otherwise, the literal is treated as a `Double` literal (if it has a decimal point or an + exponent), or as an `Int` literal (if not). (This last possibility is again as in Scala 2 or Java.) + +With these rules, the definition + +```scala +val x: Long = -10_000_000_000 +``` + +is legal by rule (1), since the expected type is `Long`. 
The definitions + +```scala +val y: BigInt = 0x123_abc_789_def_345_678_901 +val z: BigDecimal = 111222333444.55 +``` + +are legal by rule (2), since both `BigInt` and `BigDecimal` have [`FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html) instances (which implement the `FromDigits` subclasses [`FromDigits.WithRadix`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$WithRadix.html) and [`FromDigits.Decimal`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$Decimal.html), respectively). On the other hand, + +```scala +val x = -10_000_000_000 +``` + +gives a type error, since without an expected type `-10_000_000_000` is treated by rule (3) as an `Int` literal, but it is too large for that type. + +## The `FromDigits` Trait + +To allow numeric literals, a type simply has to define a `given` instance of the +[`scala.util.FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html) type class, or one of its subclasses. `FromDigits` is defined as follows: + +```scala +trait FromDigits[T]: + def fromDigits(digits: String): T +``` + +Implementations of `fromDigits` convert strings of digits to the values of the +implementation type `T`. +The `digits` string consists of digits between `0` and `9`, possibly preceded by a +sign ("+" or "-"). Number separator characters `_` are filtered out before +the string is passed to `fromDigits`. 
+ +The companion object [`FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits$.html) also defines subclasses of `FromDigits` for whole numbers with a given radix, for numbers with a decimal point, and for numbers that can have both a decimal point and an exponent: + +```scala +object FromDigits: + + /** A subclass of `FromDigits` that also allows to convert whole + * number literals with a radix other than 10 + */ + trait WithRadix[T] extends FromDigits[T]: + def fromDigits(digits: String): T = fromDigits(digits, 10) + def fromDigits(digits: String, radix: Int): T + + /** A subclass of `FromDigits` that also allows to convert number + * literals containing a decimal point ".". + */ + trait Decimal[T] extends FromDigits[T] + + /** A subclass of `FromDigits`that allows also to convert number + * literals containing a decimal point "." or an + * exponent `('e' | 'E')['+' | '-']digit digit*`. + */ + trait Floating[T] extends Decimal[T] +``` + +A user-defined number type can implement one of those, which signals to the compiler +that hexadecimal numbers, decimal points, or exponents are also accepted in literals +for this type. + +## Error Handling + +`FromDigits` implementations can signal errors by throwing exceptions of some subtype +of [`FromDigitsException`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$FromDigitsException.html). `FromDigitsException` is defined with three subclasses in the +`FromDigits` object as follows: + +```scala +abstract class FromDigitsException(msg: String) extends NumberFormatException(msg) + +class NumberTooLarge (msg: String = "number too large") extends FromDigitsException(msg) +class NumberTooSmall (msg: String = "number too small") extends FromDigitsException(msg) +class MalformedNumber(msg: String = "malformed number literal") extends FromDigitsException(msg) +``` + +## Example + +As a fully worked out example, here is an implementation of a new numeric class, `BigFloat`, that accepts numeric literals. 
`BigFloat` is defined in terms of a `BigInt` mantissa and an `Int` exponent: + +```scala +case class BigFloat(mantissa: BigInt, exponent: Int): + override def toString = s"${mantissa}e${exponent}" +``` + +`BigFloat` literals can have a decimal point as well as an exponent. E.g. the following expression +should produce the `BigFloat` number `BigFloat(-123, 997)`: + +```scala +-0.123E+1000: BigFloat +``` + +The companion object of `BigFloat` defines an `apply` constructor method to construct a `BigFloat` +from a `digits` string. Here is a possible implementation: + +```scala +object BigFloat: + import scala.util.FromDigits + + def apply(digits: String): BigFloat = + val (mantissaDigits, givenExponent) = + digits.toUpperCase.split('E') match + case Array(mantissaDigits, edigits) => + val expo = + try FromDigits.intFromDigits(edigits) + catch case ex: FromDigits.NumberTooLarge => + throw FromDigits.NumberTooLarge(s"exponent too large: $edigits") + (mantissaDigits, expo) + case Array(mantissaDigits) => + (mantissaDigits, 0) + val (intPart, exponent) = + mantissaDigits.split('.') match + case Array(intPart, decimalPart) => + (intPart ++ decimalPart, givenExponent - decimalPart.length) + case Array(intPart) => + (intPart, givenExponent) + BigFloat(BigInt(intPart), exponent) +``` + +To accept `BigFloat` literals, all that's needed in addition is a `given` instance of type +`FromDigits.Floating[BigFloat]`: + +```scala + given FromDigits: FromDigits.Floating[BigFloat] with + def fromDigits(digits: String) = apply(digits) +end BigFloat +``` + +Note that the `apply` method does not check the format of the `digits` argument. It is +assumed that only valid arguments are passed. For calls coming from the compiler +that assumption is valid, since the compiler will first check whether a numeric +literal has the correct format before it gets passed on to a conversion method. 
+ +## Compile-Time Errors + +With the setup of the previous section, a literal like + +```scala +1e10_0000_000_000: BigFloat +``` + +would be expanded by the compiler to + +```scala +BigFloat.FromDigits.fromDigits("1e100000000000") +``` + +Evaluating this expression throws a [`NumberTooLarge`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$NumberTooLarge.html) exception at run time. We would like it to +produce a compile-time error instead. We can achieve this by tweaking the `BigFloat` class +with a small dose of metaprogramming. The idea is to turn the `fromDigits` method +into a macro, i.e. make it an inline method with a splice as right-hand side. +To do this, replace the `FromDigits` instance in the `BigFloat` object by the following two definitions: + +```scala +object BigFloat: + ... + + class FromDigits extends FromDigits.Floating[BigFloat]: + def fromDigits(digits: String) = apply(digits) + + given FromDigits with + override inline def fromDigits(digits: String) = ${ + fromDigitsImpl('digits) + } +``` + +Note that an inline method cannot directly fill in for an abstract method, since it produces +no code that can be executed at runtime. That is why we define an intermediary class +`FromDigits` that contains a fallback implementation which is then overridden by the inline +method in the `FromDigits` given instance. That method is defined in terms of a macro +implementation method `fromDigitsImpl`. Here is its definition: + +```scala + private def fromDigitsImpl(digits: Expr[String])(using ctx: Quotes): Expr[BigFloat] = + digits.value match + case Some(ds) => + try + val BigFloat(m, e) = apply(ds) + '{BigFloat(${Expr(m)}, ${Expr(e)})} + catch case ex: FromDigits.FromDigitsException => + ctx.error(ex.getMessage) + '{BigFloat(0, 0)} + case None => + '{apply($digits)} +end BigFloat +``` + +The macro implementation takes an argument of type `Expr[String]` and yields +a result of type `Expr[BigFloat]`. It tests whether its argument is a constant +string. 
If that is the case, it converts the string using the `apply` method +and lifts the resulting `BigFloat` back to `Expr` level. For non-constant +strings `fromDigitsImpl(digits)` is simply `apply(digits)`, i.e. everything is +evaluated at runtime in this case. + +The interesting part is the `catch` part of the case where `digits` is constant. +If the `apply` method throws a `FromDigitsException`, the exception's message is issued as a compile time error in the `ctx.error(ex.getMessage)` call. + +With this new implementation, a definition like + +```scala +val x: BigFloat = 1234.45e3333333333 +``` + +would give a compile time error message: + +```scala +3 | val x: BigFloat = 1234.45e3333333333 + | ^^^^^^^^^^^^^^^^^^ + | exponent too large: 3333333333 +``` diff --git a/docs/_spec/TODOreference/experimental/overview.md b/docs/_spec/TODOreference/experimental/overview.md new file mode 100644 index 000000000000..254f103896e4 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/overview.md @@ -0,0 +1,29 @@ +--- +layout: doc-page +title: "Experimental" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/overview.html +redirectFrom: overview.html +--- + +## Experimental language features + +All experimental language features can be found under the `scala.language.experimental` package. +They are enabled by importing the feature or using the `-language` compiler flag. + +* [`erasedDefinitions`](./erased-defs.md): Enable support for `erased` modifier. +* `fewerBraces`: Enable support for using indentation for arguments. +* [`genericNumberLiterals`](./numeric-literals.md): Enable support for generic number literals. +* [`namedTypeArguments`](./named-typeargs.md): Enable support for named type arguments +* [`saferExceptions`](./canthrow.md): Enable support for checked exceptions. 
+ +## Experimental language imports + +In general, experimental language features can be imported in an experimental scope (see [experimental definitions](../other-new-features/experimental-defs.md)). +They can be imported at the top-level if all top-level definitions are `@experimental`. + +## Experimental language features supported by special compiler options + +Some experimental language features that are still in research and development can be enabled with special compiler options. These include + +* [`-Yexplicit-nulls`](./explicit-nulls.md). Enable support for tracking null references in the type system. +* [`-Ycc`](./cc.md). Enable support for capture checking. diff --git a/docs/_spec/TODOreference/experimental/tupled-function.md b/docs/_spec/TODOreference/experimental/tupled-function.md new file mode 100644 index 000000000000..da108fc832ad --- /dev/null +++ b/docs/_spec/TODOreference/experimental/tupled-function.md @@ -0,0 +1,82 @@ +--- +layout: doc-page +title: "Tupled Function" +--- + +Tupled Function +---------------------- + +With functions bounded to arities up to 22 it was possible to generalize some operation on all function types using overloading. +Now that we have functions and tuples generalized to [arities above 22](../dropped-features/limit22.md) overloading is not an option anymore. +The type class `TupleFunction` provides a way to abstract directly over a function of any arity converting it to an equivalent function that receives all arguments in a single tuple. 
+ +```scala +/** Type class relating a `FunctionN[..., R]` with an equivalent tupled function `Function1[TupleN[...], R]` + * + * @tparam F a function type + * @tparam G a tupled function type (function of arity 1 receiving a tuple as argument) + */ +@implicitNotFound("${F} cannot be tupled as ${G}") +sealed trait TupledFunction[F, G] { + def tupled(f: F): G + def untupled(g: G): F +} +``` + +The compiler will synthesize an instance of `TupledFunction[F, G]` if: + +* `F` is a function type of arity `N` +* `G` is a function with a single tuple argument of size `N` and its types are equal to the arguments of `F` +* The return type of `F` is equal to the return type of `G` +* `F` and `G` are the same sort of function (both are `(...) => R` or both are `(...) ?=> R`) +* If only one of `F` or `G` is instantiated the second one is inferred. + +Examples +-------- +`TupledFunction` can be used to generalize the `Function1.tupled`, ... `Function22.tupled` methods to functions of any arities. +The following defines `tupled` as [extension method](../contextual/extension-methods.html) ([full example](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-tupled.scala)). + +```scala +/** Creates a tupled version of this function: instead of N arguments, + * it accepts a single [[scala.Tuple]] with N elements as argument. 
+ * + * @tparam F the function type + * @tparam Args the tuple type with the same types as the function arguments of F + * @tparam R the return type of F + */ +extension [F, Args <: Tuple, R](f: F) + def tupled(using tf: TupledFunction[F, Args => R]): Args => R = tf.tupled(f) +``` + +`TupledFunction` can be used to generalize the `Function.untupled` to a function of any arities ([full example](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-untupled.scala)) + +```scala +/** Creates an untupled version of this function: instead of a single argument of type [[scala.Tuple]] with N elements, + * it accepts N arguments. + * + * This is a generalization of [[scala.Function.untupled]] that work on functions of any arity + * + * @tparam F the function type + * @tparam Args the tuple type with the same types as the function arguments of F + * @tparam R the return type of F + */ +extension [F, Args <: Tuple, R](f: Args => R) + def untupled(using tf: TupledFunction[F, Args => R]): F = tf.untupled(f) +``` + +`TupledFunction` can also be used to generalize the [`Tuple1.compose`](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-compose.scala) and [`Tuple1.andThen`](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-andThen.scala) methods to compose functions of larger arities and with functions that return tuples. + +```scala +/** Composes two instances of TupledFunction into a new TupledFunction, with this function applied last. 
+ * + * @tparam F a function type + * @tparam G a function type + * @tparam FArgs the tuple type with the same types as the function arguments of F and return type of G + * @tparam GArgs the tuple type with the same types as the function arguments of G + * @tparam R the return type of F + */ +extension [F, G, FArgs <: Tuple, GArgs <: Tuple, R](f: F) + def compose(g: G)(using tg: TupledFunction[G, GArgs => FArgs], tf: TupledFunction[F, FArgs => R]): GArgs => R = { + (x: GArgs) => tf.tupled(f)(tg.tupled(g)(x)) +} +``` diff --git a/docs/_spec/TODOreference/features-classification.md b/docs/_spec/TODOreference/features-classification.md new file mode 100644 index 000000000000..36cea3b9e72d --- /dev/null +++ b/docs/_spec/TODOreference/features-classification.md @@ -0,0 +1,199 @@ +--- +layout: doc-page +title: "A Classification of Proposed Language Features" +nightlyOf: https://docs.scala-lang.org/scala3/reference/features-classification.html +--- + +This document provides an overview of the constructs proposed for Scala 3 with the aim to facilitate the discussion what to include and when to include it. It classifies features into eight groups: (1) essential foundations, (2) simplifications, (3) restrictions, (4) dropped features, (5) changed features, (6) new features, (7) features oriented towards metaprogramming with the aim to replace existing macros, and (8) changes to type checking and inference. + +Each group contains sections classifying the status (i.e. relative importance to be a part of Scala 3, and relative urgency when to decide this) and the migration cost +of the constructs in it. + +The current document reflects the state of things as of April, 2019. It will be updated to reflect any future changes in that status. 
+ +## Essential Foundations + +These new constructs directly model core features of [DOT](https://www.scala-lang.org/blog/2016/02/03/essence-of-scala.html), higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf). + + - [Intersection types](new-types/intersection-types.md), replacing compound types, + - [Union types](new-types/union-types.md), + - [Type lambdas](new-types/type-lambdas.md), + replacing encodings using structural types and type projection. + - [Context functions](contextual/context-functions.md) offering abstraction over given parameters. + +**Status: essential** + +These are essential core features of Scala 3. Without them, Scala 3 would be a completely different language, with different foundations. + +**Migration cost: none to low** + +Since these are additions, there's generally no migration cost for old code. An exception are intersection types which replace compound types with slightly cleaned-up semantics. But few programs would be affected by this change. + +## Simplifications + +These constructs replace existing constructs with the aim of making the language safer and simpler to use, and to promote uniformity in code style. + + - [Trait parameters](other-new-features/trait-parameters.md) replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct. + - [Given instances](contextual/givens.md) + replace implicit objects and defs, focussing on intent over mechanism. + - [Using clauses](contextual/using-clauses.md) replace implicit parameters, avoiding their ambiguities. + - [Extension methods](contextual/extension-methods.md) replace implicit classes with a clearer and simpler mechanism. + - [Opaque type aliases](other-new-features/opaques.md) replace most uses + of value classes while guaranteeing absence of boxing. 
+ - [Top-level definitions](dropped-features/package-objects.md) replace package objects, dropping syntactic boilerplate. + - [Export clauses](other-new-features/export.md) + provide a simple and general way to express aggregation, which can replace the + previous facade pattern of package objects inheriting from classes. + - [Vararg splices](changed-features/vararg-splices.md) now use the form `*` instead of `@ _*`, mirroring vararg expressions, + - [Creator applications](other-new-features/creator-applications.md) allow using simple function call syntax + instead of `new` expressions. `new` expressions stay around as a fallback for + the cases where creator applications cannot be used. + +With the exception of early initializers and old-style vararg splices, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later. + +Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might bring them back in a more general form if they are supported natively by the JVM as is planned by project Valhalla. + +**Status: bimodal: now or never / can delay** + +These are essential simplifications. If we decide to adopt them, we should do it for 3.0. Otherwise we are faced with the awkward situation that the Scala 3 documentation has to describe an old feature that will be replaced or superseded by a simpler one in the future. + +On the other hand, we need to decide now only about the new features in this list. The decision to drop the superseded features can be delayed. Of course, adopting a new feature without deciding to drop the superseded feature will make the language larger. + +**Migration cost: moderate** + +For the next several versions, old features will remain available and deprecation and rewrite techniques can make any migration effort low and gradual. 
+ + +## Restrictions + +These constructs are restricted to make the language safer. + + - [Implicit Conversions](contextual/conversions.md): there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import. + - [Given Imports](contextual/given-imports.md): implicits now require a special form of import, to make the import clearly visible. + - [Type Projection](dropped-features/type-projection.md): only classes can be used as prefix `C` of a type projection `C#A`. Type projection on abstract types is no longer supported since it is unsound. + - [Multiversal equality](contextual/multiversal-equality.md) implements an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`. + - [infix](https://github.com/lampepfl/dotty/pull/5975) + makes method application syntax uniform across code bases. + +Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`. + +**Status: now or never** + +These are essential restrictions. If we decide to adopt them, we should do it for 3.0. Otherwise we are faced with the awkward situation that the Scala 3 documentation has to describe a feature that will be restricted in the future. + +**Migration cost: low to high** + + - _low_: multiversal equality rules out code that is nonsensical, so any rewrites required by its adoption should be classified as bug fixes. + - _moderate_: Restrictions to implicits can be accommodated by straightforward rewriting. + - _high_: Unrestricted type projection cannot always rewritten directly since it is unsound in general. + +## Dropped Constructs + +These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation. 
+ + - [DelayedInit](dropped-features/delayed-init.md), + - [Existential types](dropped-features/existential-types.md), + - [Procedure syntax](dropped-features/procedure-syntax.md), + - [Class shadowing](dropped-features/class-shadowing.md), + - [XML literals](dropped-features/xml.md), + - [Symbol literals](dropped-features/symlits.md), + - [Auto application](dropped-features/auto-apply.md), + - [Weak conformance](dropped-features/weak-conformance.md), + - [Compound types](new-types/intersection-types.md), + - [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged). + +The date when these constructs are dropped varies. The current status is: + + - Not implemented at all: + - DelayedInit, existential types, weak conformance. + - Supported under `-source 3.0-migration`: + - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form. + - Supported in 3.0, to be deprecated and phased out later: + - XML literals, compound types. + +**Status: mixed** + +Currently unimplemented features would require considerable implementation effort which would in most cases make the compiler more buggy and fragile and harder to understand. If we do not decide to drop them, they will probably show up as "not yet implemented" in the Scala 3.0 release. + +Currently implemented features could stay around indefinitely. Updated docs may simply ignore them, in the expectation that they might go away eventually. So the decision about their removal can be delayed. + +**Migration cost: moderate to high** + +Dropped features require rewrites to avoid their use in programs. These rewrites can sometimes be automatic (e.g. for procedure syntax, symbol literals, auto application) +and sometimes need to be manual (e.g. class shadowing, auto tupling). Sometimes the rewrites would have to be non-local, affecting use sites as well as definition sites (e.g., in the case of `DelayedInit`, unless we find a solution). 
+ +## Changes + +These constructs have undergone changes to make them more regular and useful. + + - [Structural Types](changed-features/structural-types.md): They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo. + - [Name-based pattern matching](changed-features/pattern-matching.md): The existing undocumented Scala 2 implementation has been codified in a slightly simplified form. + - [Eta expansion](changed-features/eta-expansion.md) is now performed universally also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0. + - [Implicit Resolution](changed-features/implicit-resolution.md): The implicit resolution rules have been cleaned up to make them more useful and less surprising. Implicit scope is restricted to no longer include package prefixes. + +Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally. + +**Status: strongly advisable** + +The features have been implemented in their new form in Scala 3.0's compiler. They provide clear improvements in simplicity and functionality compared to the status quo. Going back would require significant implementation effort for a net loss of functionality. + +**Migration cost: low to high** + +Only a few programs should require changes, but some necessary changes might be non-local (as in the case of restrictions to implicit scope). + +## New Constructs + +These are additions to the language that make it more powerful or pleasant to use. + + - [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md). + - [Parameter untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring. 
+ - [Dependent function types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types. + - [Polymorphic function types](https://github.com/lampepfl/dotty/pull/4672) generalize polymorphic methods to dependent function values and types. _Current status_: There is a proposal, and a prototype implementation, but the implementation has not been finalized or merged yet. + - [Kind polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors. + +**Status: mixed** + +Enums offer an essential simplification of fundamental use patterns, so they should be adopted for Scala 3.0. Auto-parameter tupling is a very small change that removes some awkwardness, so it might as well be adopted now. The other features constitute more specialized functionality which could be introduced in later versions. On the other hand, except for polymorphic function types they are all fully implemented, so if the Scala 3.0 spec does not include them, they might be still made available under a language flag. + +**Migration cost: none** + +Being new features, existing code migrates without changes. To be sure, sometimes it would be attractive to rewrite code to make use of the new features in order to increase clarity and conciseness. + +## Metaprogramming + +The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming was achieved by a combination of macros and libraries such as [Shapeless](https://github.com/milessabin/shapeless) that were in turn based on some key macros. Current Scala 2 macro mechanisms are a thin veneer on top the current Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3. 
+ +It's worth noting that macros were never included in the [Scala 2 language specification](https://scala-lang.org/files/archive/spec/2.13/) and were so far made available only under an `-experimental` flag. This has not prevented their widespread usage. + +To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes until the final release. Stabilizing the feature set needed for metaprogramming is our first priority. + +- [Match types](new-types/match-types.md) allow computation on types. +- [Inline](metaprogramming/inline.md) provides +by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros. +- [Quotes and splices](metaprogramming/macros.md) provide a principled way to express macros and staging with a unified set of abstractions. +- [Type class derivation](contextual/derivation.md) provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, efficient and easier to use than the macro. +- [Implicit by-name parameters](contextual/by-name-context-parameters.md) provide a more robust in-language implementation of the `Lazy` macro in Shapeless. + +**Status: not yet settled** + +We know we need a practical replacement for current macros. The features listed above are very promising in that respect, but we need more complete implementations and more use cases to reach a final verdict. + +**Migration cost: very high** + +Existing macro libraries will have to be rewritten from the ground up. In many cases the rewritten libraries will turn out to be simpler and more robust than the old ones, but that does not relieve one of the cost of the rewrites. 
It's currently unclear to what degree users of macro libraries will be affected. We aim to provide sufficient functionality so that core macros can be re-implemented fully, but given the vast feature set of the various macro extensions to Scala 2 it is difficult to arrive at a workable limitation of scope. + +## Changes to Type Checking and Inference + +The Scala 3 compiler uses a new algorithm for type inference, which relies on a general subtype constraint solver. The new algorithm often [works better than the old](https://contributors.scala-lang.org/t/better-type-inference-for-scala-send-us-your-problematic-cases/2410), but there are inevitably situations where the results of both algorithms differ, leading to errors diagnosed by Scala 3 for programs that the Scala 2 compiler accepts. + +**Status: essential** + +The new type-checking and inference algorithms are the essential core of the new compiler. They cannot be reverted without dropping the whole implementation of Scala 3. + +**Migration cost: high** + +Some existing programs will break and, given the complex nature of type inference, it will not always be clear what change caused the breakage and how to fix it. + +In our experience, macros and changes in type and implicit argument inference together cause the large majority of problems encountered when porting existing code to Scala 3. The latter source of problems could be addressed systematically by a tool that added all inferred types and implicit arguments to a Scala 2 source code file. Most likely such a tool would be implemented as a [Scala 2 compiler plugin](https://docs.scala-lang.org/overviews/plugins/index.html). The resulting code would have a greatly increased likelihood to compile under Scala 3, but would often be bulky to the point of being unreadable. A second part of the rewriting tool should then selectively and iteratively remove type and implicit annotations that were synthesized by the first part as long as they compile under Scala 3. 
This second part could be implemented as a program that invokes the Scala 3 compiler `scalac` programmatically. + +Several people have proposed such a tool for some time now. I believe it is time we find the will and the resources to actually implement it. diff --git a/docs/_spec/TODOreference/language-versions/binary-compatibility.md b/docs/_spec/TODOreference/language-versions/binary-compatibility.md new file mode 100644 index 000000000000..df1c19f97868 --- /dev/null +++ b/docs/_spec/TODOreference/language-versions/binary-compatibility.md @@ -0,0 +1,13 @@ +--- +layout: doc-page +title: "Binary Compatibility" +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html +--- + +In Scala 2 different minor versions of the compiler were free to change the way how they encode different language features in JVM bytecode so each bump of the compiler's minor version resulted in breaking binary compatibility and if a project had any Scala dependencies they all needed to be (cross-)compiled to the same minor Scala version that was used in that project itself. On the contrary, Scala 3 has a stable encoding into JVM bytecode. + +In addition to classfiles the compilation process in Scala 3 also produces files with `.tasty` extension. The [TASTy](https://docs.scala-lang.org/scala3/guides/tasty-overview.html) format is an intermediate representation of Scala code containing full information about sources together with information provided by the typer. Some of this information is lost during generation of bytecode so Scala 3 compilers read TASTy files during compilation in addition to classfiles to know the exact types of values, methods, etc. in already compiled classes (although compilation from TASTy files only is also possible). TASTy files are also typically distributed together with classfiles in published artifacts. 
+ +TASTy format is extensible but it preserves backward compatibility and the evolution happens between minor releases of the language. This means a Scala compiler in version `3.x1.y1` is able to read TASTy files produced by another compiler in version `3.x2.y2` if `x1 >= x2` (assuming two stable versions of the compiler are considered - `SNAPSHOT` or `NIGHTLY` compiler versions can read TASTy in an older stable format but their TASTY versions are not compatible between each other even if the compilers have the same minor version; also compilers in stable versions cannot read TASTy generated by an unstable version). + +TASTy version number has the format of `.-` and the numbering changes in parallel to language releases in such a way that a bump in language minor version corresponds to a bump in TASTy minor version (e.g. for Scala `3.0.0` the TASTy version is `28.0-0`). Experimental version set to 0 signifies a stable version while others are considered unstable/experimental. TASTy version is not strictly bound to the data format itself - any changes to the API of the standard library also require a change in TASTy minor version. diff --git a/docs/_spec/TODOreference/language-versions/language-versions.md b/docs/_spec/TODOreference/language-versions/language-versions.md new file mode 100644 index 000000000000..2dfd04857cab --- /dev/null +++ b/docs/_spec/TODOreference/language-versions/language-versions.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Language Versions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/index.html +--- + +Additional information on interoperability and migration between Scala 2 and 3 can be found [here](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html). 
diff --git a/docs/_spec/TODOreference/language-versions/source-compatibility.md b/docs/_spec/TODOreference/language-versions/source-compatibility.md new file mode 100644 index 000000000000..4d5b468ac8f2 --- /dev/null +++ b/docs/_spec/TODOreference/language-versions/source-compatibility.md @@ -0,0 +1,43 @@ +--- +layout: doc-page +title: "Source Compatibility" +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/source-compatibility.html +--- + +Scala 3 does NOT guarantee source compatibility between different minor language versions (e.g. some syntax valid in 3.x might get deprecated and then phased out in 3.y for y > x). There are also some syntax structures that were valid in Scala 2 but are not anymore in Scala 3. However the compiler provides a possibility to specify the desired version of syntax used in a particular file or globally for a run of the compiler to make migration between versions easier. + +The default Scala language syntax version currently supported by the Dotty compiler is [`3.2`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2$.html). There are also other language versions that can be specified instead: + +- [`3.0-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0-migration$.html): Same as +`3.0` and `3.1`, but with a Scala 2 compatibility mode that helps moving Scala 2.13 sources over to Scala 3. In particular, it + + - flags some Scala 2 constructs that are disallowed in Scala 3 as migration warnings instead of hard errors, + - changes some rules to be more lenient and backwards compatible with Scala 2.13 + - gives some additional warnings where the semantics has changed between Scala 2.13 and 3.0 + - in conjunction with `-rewrite`, offer code rewrites from Scala 2.13 to 3.0. 
+ +- [`3.0`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0$.html), [`3.1`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/1$.html): the default set of features included in scala versions `3.0.0` to `3.1.3`. +- [`3.2`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2$.html): the same as `3.0` and `3.1`, but in addition: + - [stricter pattern bindings](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html) are now enabled (part of `future` in earlier `3.x` releases), producing warnings for refutable patterns. These warnings can be silenced to achieve the same runtime behavior, but in `future` they become errors and refutable patterns will not compile. + - [Nonlocal returns](https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html) now produce a warning upon usage (they are still an error under `future`). +- [`3.2-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2-migration$.html): the same as `3.2`, but in conjunction with `-rewrite`, offer code rewrites from Scala `3.0/3.1` to `3.2`. +- [`future`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future$.html): A preview of changes that will be introduced in `3.x` versions after `3.2`. +Some Scala 2 specific idioms are dropped in this version. The feature set supported by this version may grow over time as features become stabilised for preview. + +- [`future-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future-migration$.html): Same as `future` but with additional helpers to migrate from `3.2`. Similarly to the helpers available under `3.0-migration`, these include migration warnings and optional rewrites. + +There are two ways to specify a language version : + +- with a `-source` command line setting, e.g. `-source 3.0-migration`. 
+- with a `scala.language` import at the top of a source file, e.g: + +```scala +package p +import scala.language.`future-migration` + +class C { ... } +``` + +Language imports supersede command-line settings in the source files where they are specified. Only one language import specifying a source version is allowed in a source file, and it must come before any definitions in that file. + +**Note**: The [Scala 3 Migration Guide](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html) gives further information to help the Scala programmer moving from Scala 2.13 to Scala 3. diff --git a/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md b/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md new file mode 100644 index 000000000000..a43c941ae943 --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md @@ -0,0 +1,294 @@ +--- +layout: doc-page +title: "Compile-time operations" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/compiletime-ops.html +--- + +## The `scala.compiletime` Package + +The [`scala.compiletime`](https://scala-lang.org/api/3.x/scala/compiletime.html) package contains helper definitions that provide support for compile-time operations over values. They are described in the following. + +### `constValue` and `constValueOpt` + +`constValue` is a function that produces the constant value represented by a +type. + +```scala +import scala.compiletime.constValue +import scala.compiletime.ops.int.S + +transparent inline def toIntC[N]: Int = + inline constValue[N] match + case 0 => 0 + case _: S[n1] => 1 + toIntC[n1] + +inline val ctwo = toIntC[2] +``` + +`constValueOpt` is the same as `constValue`, however returning an `Option[T]` +enabling us to handle situations where a value is not present. Note that `S` is +the type of the successor of some singleton type. For example the type `S[1]` is +the singleton type `2`. 
+ +### `erasedValue` + +So far we have seen inline methods that take terms (tuples and integers) as +parameters. What if we want to base case distinctions on types instead? For +instance, one would like to be able to write a function `defaultValue`, that, +given a type `T`, returns optionally the default value of `T`, if it exists. +We can already express this using rewrite match expressions and a simple +helper function, `scala.compiletime.erasedValue`, which is defined as follows: + +```scala +def erasedValue[T]: T +``` + +The `erasedValue` function _pretends_ to return a value of its type argument `T`. +Calling this function will always result in a compile-time error unless the call +is removed from the code while inlining. + +Using `erasedValue`, we can then define `defaultValue` as follows: + +```scala +import scala.compiletime.erasedValue + +transparent inline def defaultValue[T] = + inline erasedValue[T] match + case _: Byte => Some(0: Byte) + case _: Char => Some(0: Char) + case _: Short => Some(0: Short) + case _: Int => Some(0) + case _: Long => Some(0L) + case _: Float => Some(0.0f) + case _: Double => Some(0.0d) + case _: Boolean => Some(false) + case _: Unit => Some(()) + case _ => None +``` + +Then: + +```scala +val dInt: Some[Int] = defaultValue[Int] +val dDouble: Some[Double] = defaultValue[Double] +val dBoolean: Some[Boolean] = defaultValue[Boolean] +val dAny: None.type = defaultValue[Any] +``` + +As another example, consider the type-level version of `toInt` below: +given a _type_ representing a Peano number, +return the integer _value_ corresponding to it. +Consider the definitions of numbers as in the _Inline +Match_ section above. 
Here is how `toIntT` can be defined: + +```scala +transparent inline def toIntT[N <: Nat]: Int = + inline scala.compiletime.erasedValue[N] match + case _: Zero.type => 0 + case _: Succ[n] => toIntT[n] + 1 + +inline val two = toIntT[Succ[Succ[Zero.type]]] +``` + +`erasedValue` is an `erased` method so it cannot be used and has no runtime +behavior. Since `toIntT` performs static checks over the static type of `N` we +can safely use it to scrutinize its return type (`S[S[Z]]` in this case). + +### `error` + +The `error` method is used to produce user-defined compile errors during inline expansion. +It has the following signature: + +```scala +inline def error(inline msg: String): Nothing +``` + +If an inline expansion results in a call `error(msgStr)` the compiler +produces an error message containing the given `msgStr`. + +```scala +import scala.compiletime.{error, codeOf} + +inline def fail() = + error("failed for a reason") + +fail() // error: failed for a reason +``` + +or + +```scala +inline def fail(inline p1: Any) = + error("failed on: " + codeOf(p1)) + +fail(identity("foo")) // error: failed on: identity[String]("foo") +``` + +### The `scala.compiletime.ops` package + +The [`scala.compiletime.ops`](https://scala-lang.org/api/3.x/scala/compiletime/ops.html) package contains types that provide support for +primitive operations on singleton types. For example, +`scala.compiletime.ops.int.*` provides support for multiplying two singleton +`Int` types, and `scala.compiletime.ops.boolean.&&` for the conjunction of two +`Boolean` types. When all arguments to a type in `scala.compiletime.ops` are +singleton types, the compiler can evaluate the result of the operation. 
+ +```scala +import scala.compiletime.ops.int.* +import scala.compiletime.ops.boolean.* + +val conjunction: true && true = true +val multiplication: 3 * 5 = 15 +``` + +Many of these singleton operation types are meant to be used infix (as in [SLS §3.2.10](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#infix-types)). + +Since type aliases have the same precedence rules as their term-level +equivalents, the operations compose with the expected precedence rules: + +```scala +import scala.compiletime.ops.int.* +val x: 1 + 2 * 3 = 7 +``` + +The operation types are located in packages named after the type of the +left-hand side parameter: for instance, `scala.compiletime.ops.int.+` represents +addition of two numbers, while `scala.compiletime.ops.string.+` represents string +concatenation. To use both and distinguish the two types from each other, a +match type can dispatch to the correct implementation: + +```scala +import scala.compiletime.ops.* + +import scala.annotation.infix + +type +[X <: Int | String, Y <: Int | String] = (X, Y) match + case (Int, Int) => int.+[X, Y] + case (String, String) => string.+[X, Y] + +val concat: "a" + "b" = "ab" +val addition: 1 + 1 = 2 +``` + +## Summoning Implicits Selectively + +It is foreseen that many areas of typelevel programming can be done with rewrite +methods instead of implicits. But sometimes implicits are unavoidable. The +problem so far was that the Prolog-like programming style of implicit search +becomes viral: Once some construct depends on implicit search it has to be +written as a logic program itself. Consider for instance the problem of creating +a `TreeSet[T]` or a `HashSet[T]` depending on whether `T` has an `Ordering` or +not. We can create a set of implicit definitions like this: + +```scala +trait SetFor[T, S <: Set[T]] + +class LowPriority: + implicit def hashSetFor[T]: SetFor[T, HashSet[T]] = ... 
+ +object SetsFor extends LowPriority: + implicit def treeSetFor[T: Ordering]: SetFor[T, TreeSet[T]] = ... +``` + +Clearly, this is not pretty. Besides all the usual indirection of implicit +search, we face the problem of rule prioritization where we have to ensure that +`treeSetFor` takes priority over `hashSetFor` if the element type has an +ordering. This is solved (clumsily) by putting `hashSetFor` in a superclass +`LowPriority` of the object `SetsFor` where `treeSetFor` is defined. Maybe the +boilerplate would still be acceptable if the crufty code could be contained. +However, this is not the case. Every user of the abstraction has to be +parameterized itself with a `SetFor` implicit. Considering the simple task _"I +want a `TreeSet[T]` if `T` has an ordering and a `HashSet[T]` otherwise"_, this +seems like a lot of ceremony. + +There are some proposals to improve the situation in specific areas, for +instance by allowing more elaborate schemes to specify priorities. But they all +keep the viral nature of implicit search programs based on logic programming. + +By contrast, the new `summonFrom` construct makes implicit search available +in a functional context. To solve the problem of creating the right set, one +would use it as follows: + +```scala +import scala.compiletime.summonFrom + +inline def setFor[T]: Set[T] = summonFrom { + case ord: Ordering[T] => new TreeSet[T]()(using ord) + case _ => new HashSet[T] +} +``` + +A `summonFrom` call takes a pattern matching closure as argument. All patterns +in the closure are type ascriptions of the form `identifier : Type`. + +Patterns are tried in sequence. The first case with a pattern `x: T` such that an implicit value of type `T` can be summoned is chosen. + +Alternatively, one can also use a pattern-bound given instance, which avoids the explicit using clause. 
For instance, `setFor` could also be formulated as follows: + +```scala +import scala.compiletime.summonFrom + +inline def setFor[T]: Set[T] = summonFrom { + case given Ordering[T] => new TreeSet[T] + case _ => new HashSet[T] +} +``` + +`summonFrom` applications must be reduced at compile time. + +Consequently, if we summon an `Ordering[String]` the code above will return a +new instance of `TreeSet[String]`. + +```scala +summon[Ordering[String]] + +println(setFor[String].getClass) // prints class scala.collection.immutable.TreeSet +``` + +**Note** `summonFrom` applications can raise ambiguity errors. Consider the following +code with two givens in scope of type `A`. The pattern match in `f` will raise +an ambiguity error of `f` is applied. + +```scala +class A +given a1: A = new A +given a2: A = new A + +inline def f: Any = summonFrom { + case given _: A => ??? // error: ambiguous givens +} +``` + +## `summonInline` + +The shorthand `summonInline` provides a simple way to write a `summon` that is delayed until the call is inlined. +Unlike `summonFrom`, `summonInline` also yields the implicit-not-found error, if a given instance of the summoned +type is not found. +```scala +import scala.compiletime.summonInline +import scala.annotation.implicitNotFound + +@implicitNotFound("Missing One") +trait Missing1 + +@implicitNotFound("Missing Two") +trait Missing2 + +trait NotMissing +given NotMissing = ??? 
+ +transparent inline def summonInlineCheck[T <: Int](inline t : T) : Any = + inline t match + case 1 => summonInline[Missing1] + case 2 => summonInline[Missing2] + case _ => summonInline[NotMissing] + +val missing1 = summonInlineCheck(1) // error: Missing One +val missing2 = summonInlineCheck(2) // error: Missing Two +val notMissing : NotMissing = summonInlineCheck(3) +``` + +## Reference + +For more information about compile-time operations, see [PR #4768](https://github.com/lampepfl/dotty/pull/4768), +which explains how `summonFrom`'s predecessor (implicit matches) can be used for typelevel programming and code specialization and [PR #7201](https://github.com/lampepfl/dotty/pull/7201) which explains the new `summonFrom` syntax. diff --git a/docs/_spec/TODOreference/metaprogramming/inline.md b/docs/_spec/TODOreference/metaprogramming/inline.md new file mode 100644 index 000000000000..0c4800069bad --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/inline.md @@ -0,0 +1,390 @@ +--- +layout: doc-page +title: Inline +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/inline.html +--- + +## Inline Definitions + +`inline` is a new [soft modifier](../soft-modifier.md) that guarantees that a +definition will be inlined at the point of use. Example: + +```scala +object Config: + inline val logging = false + +object Logger: + + private var indent = 0 + + inline def log[T](msg: String, indentMargin: =>Int)(op: => T): T = + if Config.logging then + println(s"${" " * indent}start $msg") + indent += indentMargin + val result = op + indent -= indentMargin + println(s"${" " * indent}$msg = $result") + result + else op +end Logger +``` + +The `Config` object contains a definition of the **inline value** `logging`. +This means that `logging` is treated as a _constant value_, equivalent to its +right-hand side `false`. 
The right-hand side of such an `inline val` must itself +be a [constant expression](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions). +Used in this way, `inline` is equivalent to Java and Scala 2's `final`. Note that `final`, meaning +_inlined constant_, is still supported in Scala 3, but will be phased out. + +The `Logger` object contains a definition of the **inline method** `log`. This +method will always be inlined at the point of call. + +In the inlined code, an `if-then-else` with a constant condition will be rewritten +to its `then`- or `else`-part. Consequently, in the `log` method above the +`if Config.logging` with `Config.logging == true` will get rewritten into its +`then`-part. + +Here's an example: + +```scala +var indentSetting = 2 + +def factorial(n: BigInt): BigInt = + log(s"factorial($n)", indentSetting) { + if n == 0 then 1 + else n * factorial(n - 1) + } +``` + +If `Config.logging == false`, this will be rewritten (simplified) to: + +```scala +def factorial(n: BigInt): BigInt = + if n == 0 then 1 + else n * factorial(n - 1) +``` + +As you notice, since neither `msg` or `indentMargin` were used, they do not +appear in the generated code for `factorial`. Also note the body of our `log` +method: the `else-` part reduces to just an `op`. In the generated code we do +not generate any closures because we only refer to a by-name parameter *once*. +Consequently, the code was inlined directly and the call was beta-reduced. 
+ +In the `true` case the code will be rewritten to: + +```scala +def factorial(n: BigInt): BigInt = + val msg = s"factorial($n)" + println(s"${" " * indent}start $msg") + Logger.inline$indent_=(indent.+(indentSetting)) + val result = + if n == 0 then 1 + else n * factorial(n - 1) + Logger.inline$indent_=(indent.-(indentSetting)) + println(s"${" " * indent}$msg = $result") + result +``` + +Note that the by-value parameter `msg` is evaluated only once, per the usual Scala +semantics, by binding the value and reusing the `msg` through the body of +`factorial`. Also, note the special handling of the assignment to the private var +`indent`. It is achieved by generating a setter method `def inline$indent_=` and calling it instead. + +Inline methods always have to be fully applied. For instance, a call to +```scala +Logger.log[String]("some op", indentSetting) +``` +would be ill-formed and the compiler would complain that arguments are missing. +However, it is possible to pass wildcard arguments instead. For instance, +```scala +Logger.log[String]("some op", indentSetting)(_) +``` +would typecheck. + +### Recursive Inline Methods + +Inline methods can be recursive. For instance, when called with a constant +exponent `n`, the following method for `power` will be implemented by +straight inline code without any loop or recursion. + +```scala +inline def power(x: Double, n: Int): Double = + if n == 0 then 1.0 + else if n == 1 then x + else + val y = power(x, n / 2) + if n % 2 == 0 then y * y else y * y * x + +power(expr, 10) +// translates to +// +// val x = expr +// val y1 = x * x // ^2 +// val y2 = y1 * y1 // ^4 +// val y3 = y2 * x // ^5 +// y3 * y3 // ^10 +``` + +Parameters of inline methods can have an `inline` modifier as well. This means +that actual arguments to these parameters will be inlined in the body of the +`inline def`. `inline` parameters have call semantics equivalent to by-name parameters +but allow for duplication of the code in the argument. 
It is usually useful when constant +values need to be propagated to allow further optimizations/reductions. + +The following example shows the difference in translation between by-value, by-name and `inline` +parameters: + +```scala +inline def funkyAssertEquals(actual: Double, expected: =>Double, inline delta: Double): Unit = + if (actual - expected).abs > delta then + throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${delta}") + +funkyAssertEquals(computeActual(), computeExpected(), computeDelta()) +// translates to +// +// val actual = computeActual() +// def expected = computeExpected() +// if (actual - expected).abs > computeDelta() then +// throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${computeDelta()}") +``` + +### Rules for Overriding + +Inline methods can override other non-inline methods. The rules are as follows: + +1. If an inline method `f` implements or overrides another, non-inline method, the inline method can also be invoked at runtime. For instance, consider the scenario: + + ```scala + abstract class A: + def f: Int + def g: Int = f + + class B extends A: + inline def f = 22 + override inline def g = f + 11 + + val b = new B + val a: A = b + // inlined invocatons + assert(b.f == 22) + assert(b.g == 33) + // dynamic invocations + assert(a.f == 22) + assert(a.g == 33) + ``` + + The inlined invocations and the dynamically dispatched invocations give the same results. + +2. Inline methods are effectively final. + +3. Inline methods can also be abstract. An abstract inline method can be implemented only by other inline methods. It cannot be invoked directly: + + ```scala + abstract class A: + inline def f: Int + + object B extends A: + inline def f: Int = 22 + + B.f // OK + val a: A = B + a.f // error: cannot inline f in A. + ``` + +### Relationship to `@inline` + +Scala 2 also defines a `@inline` annotation which is used as a hint for the +backend to inline code. 
The `inline` modifier is a more powerful option: + +- expansion is guaranteed instead of best effort, +- expansion happens in the frontend instead of in the backend and +- expansion also applies to recursive methods. + + + +### The definition of constant expression + +Right-hand sides of inline values and of arguments for inline parameters must be +constant expressions in the sense defined by the [SLS §6.24](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions), +including _platform-specific_ extensions such as constant folding of pure +numeric computations. + +An inline value must have a literal type such as `1` or `true`. + +```scala +inline val four = 4 +// equivalent to +inline val four: 4 = 4 +``` + +It is also possible to have inline vals of types that do not have a syntax, such as `Short(4)`. + +```scala +trait InlineConstants: + inline val myShort: Short + +object Constants extends InlineConstants: + inline val myShort/*: Short(4)*/ = 4 +``` + +## Transparent Inline Methods + +Inline methods can additionally be declared `transparent`. +This means that the return type of the inline method can be +specialized to a more precise type upon expansion. Example: + +```scala +class A +class B extends A: + def m = true + +transparent inline def choose(b: Boolean): A = + if b then new A else new B + +val obj1 = choose(true) // static type is A +val obj2 = choose(false) // static type is B + +// obj1.m // compile-time error: `m` is not defined on `A` +obj2.m // OK +``` + +Here, the inline method `choose` returns an instance of either of the two types `A` or `B`. +If `choose` had not been declared to be `transparent`, the result +of its expansion would always be of type `A`, even though the computed value might be of the subtype `B`. +The inline method is a "blackbox" in the sense that details of its implementation do not leak out. +But if a `transparent` modifier is given, the expansion is the type of the expanded body. 
If the argument `b` +is `true`, that type is `A`, otherwise it is `B`. Consequently, calling `m` on `obj2` +type-checks since `obj2` has the same type as the expansion of `choose(false)`, which is `B`. +Transparent inline methods are "whitebox" in the sense that the type +of an application of such a method can be more specialized than its declared +return type, depending on how the method expands. + +In the following example, we see how the return type of `zero` is specialized to +the singleton type `0` permitting the addition to be ascribed with the correct +type `1`. + +```scala +transparent inline def zero: Int = 0 + +val one: 1 = zero + 1 +``` + +### Transparent vs. non-transparent inline + +As we already discussed, transparent inline methods may influence type checking at call site. +Technically this implies that transparent inline methods must be expanded during type checking of the program. +Other inline methods are inlined later after the program is fully typed. + +For example, the following two functions will be typed the same way but will be inlined at different times. + +```scala +inline def f1: T = ... +transparent inline def f2: T = (...): T +``` + +A noteworthy difference is the behavior of `transparent inline given`. +If there is an error reported when inlining that definition, it will be considered as an implicit search mismatch and the search will continue. +A `transparent inline given` can add a type ascription in its RHS (as in `f2` from the previous example) to avoid the precise type but keep the search behavior. +On the other hand, an `inline given` is taken as an implicit and then inlined after typing. +Any error will be emitted as usual. + +## Inline Conditionals + +An if-then-else expression whose condition is a constant expression can be simplified to +the selected branch. 
Prefixing an if-then-else expression with `inline` enforces that +the condition has to be a constant expression, and thus guarantees that the conditional will always +simplify. + +Example: + +```scala +inline def update(delta: Int) = + inline if delta >= 0 then increaseBy(delta) + else decreaseBy(-delta) +``` + +A call `update(22)` would rewrite to `increaseBy(22)`. But if `update` was called with +a value that was not a compile-time constant, we would get a compile time error like the one +below: + +```scala + | inline if delta >= 0 then ??? + | ^ + | cannot reduce inline if + | its condition + | delta >= 0 + | is not a constant value + | This location is in code that was inlined at ... +``` + +In a transparent inline, an `inline if` will force the inlining of any inline definition in its condition during type checking. + +## Inline Matches + +A `match` expression in the body of an `inline` method definition may be +prefixed by the `inline` modifier. If there is enough type information +at compile time to select a branch, the expression is reduced to that branch and the +type of the expression is the type of the right-hand side of that result. +If not, a compile-time error is raised that reports that the match cannot be reduced. + +The example below defines an inline method with a +single inline match expression that picks a case based on its static type: + +```scala +transparent inline def g(x: Any): Any = + inline x match + case x: String => (x, x) // Tuple2[String, String](x, x) + case x: Double => x + +g(1.0d) // Has type 1.0d which is a subtype of Double +g("test") // Has type (String, String) +``` + +The scrutinee `x` is examined statically and the inline match is reduced +accordingly returning the corresponding value (with the type specialized because `g` is declared `transparent`). +This example performs a simple type test over the scrutinee. +The type can have a richer structure like the simple ADT below. 
+`toInt` matches the structure of a number in [Church-encoding](https://en.wikipedia.org/wiki/Church_encoding) +and _computes_ the corresponding integer. + +```scala +trait Nat +case object Zero extends Nat +case class Succ[N <: Nat](n: N) extends Nat + +transparent inline def toInt(n: Nat): Int = + inline n match + case Zero => 0 + case Succ(n1) => toInt(n1) + 1 + +inline val natTwo = toInt(Succ(Succ(Zero))) +val intTwo: 2 = natTwo +``` + +`natTwo` is inferred to have the singleton type 2. + +## Reference + +For more information about the semantics of `inline`, see the [Scala 2020: Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) paper. diff --git a/docs/_spec/TODOreference/metaprogramming/macros-spec.md b/docs/_spec/TODOreference/metaprogramming/macros-spec.md new file mode 100644 index 000000000000..6045354fdbbc --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/macros-spec.md @@ -0,0 +1,714 @@ +--- +layout: doc-page +title: "Macros Spec" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros-spec.html +--- + +## Formalization + +* Multi-stage programming with generative and analytical macros[^2] +* Multi-Stage Macro Calculus, Chapter 4 of Scalable Metaprogramming in Scala 3[^1]. + Contains and extends the calculus of _Multi-stage programming with generative and analytical macros_ with type polymorphism. + +## Syntax + +The quotation syntax using `'` and `$` was chosen to mimic the string interpolation syntax of Scala. +Like a string double-quotation, a single-quote block can contain splices. +However, unlike strings, splices can contain quotes using the same rules. + +```scala +s" Hello $name" s" Hello ${name}" +'{ hello($name) } '{ hello(${name}) } +${ hello('name) } ${ hello('{name}) } +``` + +### Quotes +Quotes come in four flavors: quoted identifiers, quoted blocks, quoted block patterns and quoted type patterns. 
+Scala 2 used quoted identifiers to represent `Symbol` literals. They were deprecated in Scala 3, allowing to use them for quotation. +```scala +SimpleExpr ::= ... + | `'` alphaid // quoted identifier + | `'` `{` Block `}` // quoted block +Pattern ::= ... + | `'` `{` Block `}` // quoted block pattern + | `'` `[` Type `]` // quoted type pattern +``` + +Quoted blocks and quoted block patterns contain an expression equivalent to a normal block of code. +When entering either of those we track the fact that we are in a quoted block (`inQuoteBlock`) which is used for spliced identifiers. +When entering a quoted block pattern we additionally track the fact that we are in a quoted pattern (`inQuotePattern`) which is used to distinguish spliced blocks and splice patterns. +Lastly, the quoted type pattern simply contains a type. + +### Splices +Splices come in three flavors: spliced identifiers, spliced blocks and splice patterns. +Scala specifies identifiers containing `$` as valid identifiers but reserves them for compiler and standard library use only. +Unfortunately, many libraries have used such identifiers in Scala~2. Therefore to mitigate the cost of migration, we still support them. +We work around this by only allowing spliced identifiers[^3] within quoted blocks or quoted block patterns (`inQuoteBlock`). +Splice blocks and splice patterns can contain an arbitrary block or pattern respectively. +They are distinguished based on their surrounding quote (`inQuotePattern`), a quote block will contain spliced blocks, and a quote block pattern will contain splice patterns. + +```scala +SimpleExpr ::= ... + | `$` alphaid if inQuoteBlock // spliced identifier + | `$` `{` Block `}` if !inQuotePattern // spliced block + | `$` `{` Pattern `}` if inQuotePattern // splice pattern +``` + +### Quoted Pattern Type Variables +Quoted pattern type variables in quoted patterns and quoted type patterns do not require additional syntax. 
+Any type definition or reference with a name composed of lower cases is assumed to be a pattern type variable definition while typing. +A backticked type name with lower cases is interpreted as a reference to the type with that name. + + +## Implementation + +### Run-Time Representation + +The standard library defines the `Quotes` interface which contains all the logic and the abstract classes `Expr` and `Type`. +The compiler implements the `Quotes` interface and provides the implementation of `Expr` and `Type`. + +##### `class Expr` +Expressions of type `Expr[T]` are represented by the following abstract class: +```scala +abstract class Expr[+T] private[scala] +``` +The only implementation of `Expr` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps a typed AST and a `Scope` object with no methods of its own. +The `Scope` object is used to track the current splice scope and detect scope extrusions. + +##### `object Expr` +The companion object of `Expr` contains a few useful static methods; +the `apply`/`unapply` methods to use `ToExpr`/`FromExpr` with ease; +the `betaReduce` and `summon` methods. +It also contains methods to create expressions out of lists or sequences of expressions: `block`, `ofSeq`, `ofList`, `ofTupleFromSeq` and `ofTuple`. + +```scala +object Expr: + def apply[T](x: T)(using ToExpr[T])(using Quotes): Expr[T] = ... + def unapply[T](x: Expr[T])(using FromExpr[T])(using Quotes): Option[T] = ... + def betaReduce[T](e: Expr[T])(using Quotes): Expr[T] = ... + def summon[T: Type](using Quotes): Option[Expr[T]] = ... + def block[T](stats: List[Expr[Any]], e: Expr[T])(using Quotes): Expr[T] = ... + def ofSeq[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[Seq[T]] = ... + def ofList[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[List[T]] = ... + def ofTupleFromSeq(xs: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = ... 
+ def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): + Expr[Tuple.InverseMap[T, Expr]] = ... +``` + +##### `class Type` +Types of type `Type[T]` are represented by the following abstract class: +```scala +abstract class Type[T <: AnyKind] private[scala]: + type Underlying = T +``` + +The only implementation of `Type` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps the AST of a type and a `Scope` object with no methods of its own. +The upper bound of `T` is `AnyKind` which implies that `T` may be a higher-kinded type. +The `Underlying` alias is used to select the type from an instance of `Type`. +Users never need to use this alias as they can always use `T` directly. +`Underlying` is used for internal encoding while compiling the code (see _Type Healing_). + +##### `object Type` +The companion object of `Type` contains a few useful static methods. +The first and most important one is the `Type.of` given definition. +This instance of `Type[T]` is summoned by default when no other instance is available. +The `of` operation is an intrinsic operation that the compiler will transform into code that will generate the `Type[T]` at run-time. +Secondly, the `Type.show[T]` operation will show a string representation of the type, which is often useful when debugging. +Finally, the object defines `valueOfConstant` (and `valueOfTuple`) which can transform singleton types (or tuples of singleton types) into their value. + + +```scala +object Type: + given of[T <: AnyKind](using Quotes): Type[T] = ... + def show[T <: AnyKind](using Type[T])(using Quotes): String = ... + def valueOfConstant[T](using Type[T])(using Quotes): Option[T] = ... + def valueOfTuple[T <: Tuple](using Type[T])(using Quotes): Option[T] = ... +``` + +##### `Quotes` +The `Quotes` interface is where most of the primitive operations of the quotation system are defined. + +Quotes define all the `Expr[T]` methods as extension methods. 
+`Type[T]` does not have methods and therefore does not appear here. +These methods are available as long as `Quotes` is implicitly given in the current scope. + +The `Quotes` instance is also the entry point to the [reflection API](./refelction.md) through the `reflect` object. + +Finally, `Quotes` provides the internal logic used in quote un-pickling (`QuoteUnpickler`) in quote pattern matching (`QuoteMatching`). +These interfaces are added to the self-type of the trait to make sure they are implemented on this object but not visible to users of `Quotes`. + +Internally, the implementation of `Quotes` will also track its current splicing scope `Scope`. +This scope will be attached to any expression that is created using this `Quotes` instance. + +```scala +trait Quotes: + this: runtime.QuoteUnpickler & runtime.QuoteMatching => + + extension [T](self: Expr[T]) + def show: String + def matches(that: Expr[Any]): Boolean + def value(using FromExpr[T]): Option[T] + def valueOrAbort(using FromExpr[T]): T + end extension + + extension (self: Expr[Any]) + def isExprOf[X](using Type[X]): Boolean + def asExprOf[X](using Type[X]): Expr[X] + end extension + + // abstract object reflect ... +``` + + +##### `Scope` +The splice context is represented as a stack (immutable list) of `Scope` objects. +Each `Scope` contains the position of the splice (used for error reporting) and a reference to the enclosing splice scope `Scope`. +A scope is a sub-scope of another if the other is contained in its parents. +This check is performed when an expression is spliced into another using the `Scope` provided in the current scope in `Quotes` and the one in the `Expr` or `Type`. + +### Entry Points +The two entry points for multi-stage programming are macros and the `run` operation. + +#### Macros +Inline macro definitions will inline a top-level splice (a splice not nested in a quote). +This splice needs to be evaluated at compile-time. 
+In _Avoiding a complete interpreter_[^1], we stated the following restrictions: + + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are either literal constants, quoted expressions (parameters), `Type.of` for type parameters and a reference to `Quotes`. + +These restrictions make the implementation of the interpreter quite simple. +Java Reflection is used to call the single function call in the top-level splice. +The execution of that function is entirely done on compiled bytecode. +These are Scala static methods and may not always become Java static methods, they might be inside module objects. +As modules are encoded as class instances, we need to interpret the prefix of the method to instantiate it before we can invoke the method. + +The code of the arguments has not been compiled and therefore needs to be interpreted by the compiler. +Interpreting literal constants is as simple as extracting the constant from the AST that represents literals. +When interpreting a quoted expression, the contents of the quote is kept as an AST which is wrapped inside the implementation of `Expr`. +Calls to `Type.of[T]` also wrap the AST of the type inside the implementation of `Type`. +Finally, the reference to `Quotes` is supposed to be the reference to the quotes provided by the splice. +This reference is interpreted as a new instance of `Quotes` that contains a fresh initial `Scope` with no parents. + +The result of calling the method via Java Reflection will return an `Expr` containing a new AST that was generated by the implementation of that macro. +The scope of this `Expr` is checked to make sure it did not extrude from some splice or `run` operation. +Then the AST is extracted from the `Expr` and it is inserted as replacement for the AST that contained the top-level splice. 
+ + +#### Run-time Multi-Stage Programming + +To be able to compile the code, the `scala.quoted.staging` library defines the `Compiler` trait. +An instance of `staging.Compiler` is a wrapper over the normal Scala~3 compiler. +To be instantiated it requires an instance of the JVM _classloader_ of the application. + +```scala +import scala.quoted.staging.* +given Compiler = Compiler.make(getClass.getClassLoader) +``` + +The classloader is needed for the compiler to know which dependencies have been loaded and to load the generated code using the same classloader. + +```scala +def mkPower2()(using Quotes): Expr[Double => Double] = ... + +run(mkPower2()) +``` +To run the previous example, the compiler will create code equivalent to the following class and compile it using a new `Scope` without parents. + +```scala +class RunInstance: + def exec(): Double => Double = ${ mkPower2() } +``` +Finally, `run` will interpret `(new RunInstance).exec()` to evaluate the contents of the quote. +To do this, the resulting `RunInstance` class is loaded in the JVM using Java Reflection, instantiated and then the `exec` method is invoked. + + +### Compilation + +Quotes and splices are primitive forms in the generated typed abstract syntax trees. +These need to be type-checked with some extra rules, e.g., staging levels need to be checked and the references to generic types need to be adapted. +Finally, quoted expressions that will be generated at run-time need to be encoded (serialized) and decoded (deserialized). + +#### Typing Quoted Expressions + +The typing process for quoted expressions and splices with `Expr` is relatively straightforward. +At its core, quotes are desugared into calls to `quote`, splices are desugared into calls to `splice`. +We track the quotation level when desugaring into these methods. 
+ + +```scala +def quote[T](x: T): Quotes ?=> Expr[T] + +def splice[T](x: Quotes ?=> Expr[T]): T +``` + +It would be impossible to track the quotation levels if users wrote calls to these methods directly. +To know if it is a call to one of those methods we would need to type it first, but to type it we would need to know if it is one of these methods to update the quotation level. +Therefore these methods can only be used by the compiler. + +At run-time, the splice needs to have a reference to the `Quotes` that created its surrounding quote. +To simplify this for later phases, we track the current `Quotes` and encode a reference directly in the splice using `nestedSplice` instead of `splice`. + +```scala +def nestedSplice[T](q: Quotes)(x: q.Nested ?=> Expr[T]): T +``` +With this addition, the original `splice` is only used for top-level splices. + +The levels are mostly used to identify top-level splices that need to be evaluated while typing. +We do not use the quotation level to influence the typing process. +Level checking is performed at a later phase. +This ensures that a source expression in a quote will have the same elaboration as a source expression outside the quote. + + + +#### Quote Pattern Matching + +Pattern matching is defined in the trait `QuoteMatching`, which is part of the self type of `Quotes`. +It is implemented by `Quotes` but not available to users of `Quotes`. +To access it, the compiler generates a cast from `Quotes` to `QuoteMatching` and then selects one of its two members: `ExprMatch` or `TypeMatch`. +`ExprMatch` defines an `unapply` extractor method that is used to encode quote patterns and `TypeMatch` defines an `unapply` method for quoted type patterns. + +```scala +trait Quotes: + self: runtime.QuoteMatching & ... => + ... + +trait QuoteMatching: + object ExprMatch: + def unapply[TypeBindings <: Tuple, Tup <: Tuple] + (scrutinee: Expr[Any]) + (using pattern: Expr[Any]): Option[Tup] = ... + object TypeMatch: + ... 
+``` + +These extractor methods are only meant to be used in code generated by the compiler. +The call to the extractor that is generated has an already elaborated form that cannot be written in source, namely explicit type parameters and explicit contextual parameters. + +This extractor returns a tuple type `Tup` which cannot be inferred from the types in the method signature. +This type will be computed when typing the quote pattern and will be explicitly added to the extractor call. +To refer to type variables in arbitrary places of `Tup`, we need to define them all before their use, hence we have `TypeBindings`, which will contain all pattern type variable definitions. +The extractor also receives a given parameter of type `Expr[Any]` that will contain an expression that represents the pattern. +The compiler will explicitly add this pattern expression. +We use a given parameter because these are the only parameters we are allowed to add to the extractor call in a pattern position. + +This extractor is a bit convoluted, but it encodes away all the quotation-specific features. +It compiles the pattern down into a representation that the pattern matcher compiler phase understands. + +The quote patterns are encoded into two parts: a tuple pattern that is tasked with extracting the result of the match and a quoted expression representing the pattern. +For example, if the pattern has no `$` we will have an `EmptyTuple` as the pattern and `'{1}` to represent the pattern. + +```scala + case '{ 1 } => +// is elaborated to + case ExprMatch(EmptyTuple)(using '{1}) => +// ^^^^^^^^^^ ^^^^^^^^^^ +// pattern expression +``` +When extracting expressions, each pattern that is contained in a splice `${..}` will be placed in order in the tuple pattern. +In the following case, the `f` and `x` are placed in a tuple pattern `(f, x)`. +The type of the tuple is encoded in the `Tup` and not only in the tuple itself. 
+Otherwise, the extractor would return a tuple `Tuple` for which the types need to be tested which is in turn not possible due to type erasure. + +```scala + case '{ ((y: Int) => $f(y)).apply($x) } => +// is elaborated to + case ExprMatch[.., (Expr[Int => Int], Expr[Int])]((f, x))(using pattern) => +// pattern = '{ ((y: Int) => pat[Int](y)).apply(pat[Int]()) } +``` +The contents of the quote are transformed into a valid quote expression by replacing the splice with a marker expression `pat[T](..)`. +The type `T` is taken from the type of the splice and the arguments are the HOAS arguments. +This implies that a `pat[T]()` is a closed pattern and `pat[T](y)` is an HOAS pattern that can refer to `y`. + + +Type variables in quoted patterns are first normalized to have all definitions at the start of the pattern. +For each definition of a type variable `t` in the pattern we will add a type variable definition in `TypeBindings`. +Each one will have a corresponding `Type[t]` that will get extracted if the pattern matches. +These `Type[t]` are also listed in the `Tup` and added in the tuple pattern. +It is additionally marked as `using` in the pattern to make it implicitly available in this case branch. + + +```scala + case '{ type t; ($xs: List[t]).map[t](identity[t]) } => +// is elaborated to + case ExprMatch[(t), (Type[t], Expr[List[t]])]((using t, xs))(using p) => +// ^^^ ^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^ ^^^^^^^ +// type bindings result type pattern expression +// p = '{ @patternType type u; pat[List[u]]().map[u](identity[u]) } +``` + +The contents of the quote are transformed into a valid quote expression by replacing type variables with fresh ones that do not escape the quote scope. +These are also annotated to be easily identifiable as pattern variables. + +#### Level Consistency Checking +Level consistency checking is performed after typing the program as a static check. 
+To check level consistency we traverse the tree top-down remembering the context staging level. +Each local definition in scope is recorded with its level and each term reference to a definition is checked against the current staging level. +```scala +// level 0 +'{ // level 1 + val x = ... // level 1 with (x -> 1) + ${ // level 0 (x -> 1) + val y = ... // level 0 with (x -> 1, y -> 0) + x // error: defined at level 1 but used in level 0 + } + // level 1 (x -> 1) + x // x is ok +} +``` + +#### Type Healing + +When using a generic type `T` in a future stage, it is necessary to have a given `Type[T]` in scope. +The compiler needs to identify those references and link them with the instance of `Type[T]`. +For instance consider the following example: + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[T] } +``` + +For each reference to a generic type `T` that is defined at level 0 and used at level 1 or greater, the compiler will summon a `Type[T]`. +This is usually the given type that is provided as parameter, `t` in this case. +We can use the type `t.Underlying` to replace `T` as it is an alias of that type. +But `t.Underlying` contains the extra information that it is `t` that will be used in the evaluation of the quote. +In a sense, `Underlying` acts like a splice for types. + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[t.Underlying] } +``` + +Due to some technical limitations, it is not always possible to replace the type reference with the AST containing `t.Underlying`. +To overcome this limitation, we can simply define a list of type aliases at the start of the quote and insert the `t.Underlying` there. +This has the added advantage that we do not have to repeatedly insert the `t.Underlying` in the quote. 
+ +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ type U = t.Underlying; List.empty[U] } +``` +These aliases can be used at any level within the quote and this transformation is only performed on quotes that are at level 0. + +```scala + '{ List.empty[T] ... '{ List.empty[T] } ... } +// becomes + '{ type U = t.Underlying; List.empty[U] ... '{ List.empty[U] } ... } +``` +If we define a generic type at level 1 or greater, it will not be subject to this transformation. +In some future compilation stage, when the definition of the generic type is at level 0, it will be subject to this transformation. +This simplifies the transformation logic and avoids leaking the encoding into code that a macro could inspect. + +```scala +'{ + def emptyList[T: Type](using Quotes): Expr[List[T]] = '{ List.empty[T] } + ... +} +``` +A similar transformation is performed on `Type.of[T]`. +Any generic type in `T` needs to have an implicitly given `Type[T]` in scope, which will also be used as a path. +The example: + +```scala +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[T] match ... +// becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[t.Underlying] match ... +// then becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + t match ... +``` + +The operation `Type.of[t.Underlying]` can be optimized to just `t`. +But this is not always the case. +If the generic reference is nested in the type, we will need to keep the `Type.of`. + +```scala +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[T]] match ... +// becomes +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[t.Underlying]] match ... +``` + +By doing this transformation, we ensure that each abstract type `U` used in `Type.of` has an implicit `Type[U]` in scope. 
+This representation makes it simpler to identify parts of the type that are statically known from those that are known dynamically. +Type aliases are also added within the type of the `Type.of` though these are not valid source code. +These would look like `Type.of[{type U = t.Underlying; Map[U, U]}]` if written in source code. + + +#### Splice Normalization + +The contents of a splice may refer to variables defined in the enclosing quote. +This complicates the process of serialization of the contents of the quotes. +To make serialization simple, we first transform the contents of each level 1 splice. +Consider the following example: + +```scala +def power5to(n: Expr[Int]): Expr[Double] = '{ + val x: Int = 5 + ${ powerCode('{x}, n) } +} +``` + +The variable `x` is defined in the quote and used in the splice. +The normal form will extract all references to `x` and replace them with a staged version of `x`. +We will replace the reference to `x` of type `T` with a `$y` where `y` is of type `Expr[T]`. +Then we wrap the new contents of the splice in a lambda that defines `y` and apply it to the quoted version of `x`. +After this transformation we have 2 parts, a lambda without references to the quote, which knows how to compute the contents of the splice, and a sequence of quoted arguments that refer to variables defined in the lambda. 
+
+```scala
+def power5to(n: Expr[Int]): Expr[Double] = '{
+  val x: Int = 5
+  ${ ((y: Expr[Int]) => powerCode('{$y}, n)).apply('x) }
+}
+```
+
+In general, the splice normal form has the shape `${ <lambda>.apply(<args>*) }` and the following constraints:
+ * `<lambda>` a lambda expression that does not refer to variables defined in the outer quote
+ * `<args>` sequence of quoted expressions or `Type.of` containing references to variables defined in the enclosing quote and no references to local variables defined outside the enclosing quote
+
+
+##### Function references normalization
+A reference to a function `f` that receives parameters is not a valid value in Scala.
+Such a function reference `f` can be eta-expanded as `x => f(x)` to be used as a lambda value.
+Therefore function references cannot be transformed by the normalization as directly as other expressions as we cannot represent `'{f}` with a method reference type.
+We can use the eta-expanded form of `f` in the normalized form.
+For example, consider the reference to `f` below.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${ '{ f(3)(4, 5) } }
+}
+```
+
+To normalize this code, we can eta-expand the reference to `f` and place it in a quote containing a proper expression.
+Therefore the normalized form of the argument `'{f}` becomes the quoted lambda `'{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) }` and is an expression of type `Expr[Int => (Int, Int) => Int]`.
+The eta-expansion produces one curried lambda per parameter list.
+The application `f(3)(4, 5)` does not become `$g(3)(4, 5)` but `$g.apply(3).apply(4, 5)`.
+We add the `apply` because `g` is not a quoted reference to a function but a curried lambda.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${
+    (
+      (g: Expr[Int => (Int, Int) => Int]) => '{$g.apply(3).apply(4, 5)}
+    ).apply('{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) })
+  }
+}
+```
+
+Then we can apply it and beta-reduce the application when generating the code.
+
+```scala
+  (g: Expr[Int => Int => Int]) => betaReduce('{$g.apply(3).apply(4)})
+```
+
+
+##### Variable assignment normalization
+A reference to a mutable variable in the left-hand side of an assignment cannot be transformed directly as it is not in an expression position.
+```scala
+'{
+  var x: Int = 5
+  ${ g('{x = 2}) }
+}
+```
+
+We can use the same strategy used for function references by eta-expanding the assignment operation `x = _` into `y => x = y`.
+
+```scala
+'{
+  var x: Int = 5
+  ${
+    g(
+      (
+        (f: Expr[Int => Unit]) => betaReduce('{$f(2)})
+      ).apply('{ (y: Int) => x = $y })
+    )
+  }
+}
+```
+
+
+##### Type normalization
+Types defined in the quote are subject to a similar transformation.
+In this example, `T` is defined within the quote at level 1 and used in the splice again at level 1.
+
+```scala
+'{ def f[T] = ${ '{g[T]} } }
+```
+
+The normalization will add a `Type[T]` to the lambda, and we will insert this reference.
+The difference is that it will add an alias similar to the one used in type healing.
+In this example, we create a `type U` that aliases the staged type.
+
+```scala
+'{
+  def f[T] = ${
+    (
+      (t: Type[T]) => '{type U = t.Underlying; g[U]}
+    ).apply(Type.of[T])
+  }
+}
+```
+
+#### Serialization
+
+Quoted code needs to be pickled to make it available at run-time in the next compilation phase.
+We implement this by pickling the AST as a TASTy binary.
+
+##### TASTy
+The TASTy format is the typed abstract syntax tree serialization format of Scala 3.
+It usually pickles the fully elaborated code after type-checking and is kept along the generated Java classfiles.
+
+
+##### Pickling
+We use TASTy as a serialization format for the contents of the quotes.
+To show how serialization is performed, we will use the following example.
+```scala
+'{
+  val (x, n): (Double, Int) = (5, 2)
+  ${ powerCode('{x}, '{n}) } * ${ powerCode('{2}, '{n}) }
+}
+```
+
+This quote is transformed into the following code when normalizing the splices.
+ +```scala +'{ + val (x, n): (Double, Int) = (5, 2) + ${ + ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) + } * ${ + ((m: Expr[Int]) => powerCode('{2}, m)).apply('n) + } +} +``` + +Splice normalization is a key part of the serialization process as it only allows references to variables defined in the quote in the arguments of the lambda in the splice. +This makes it possible to create a closed representation of the quote without much effort. +The first step is to remove all the splices and replace them with holes. +A hole is like a splice but it lacks the knowledge of how to compute the contents of the splice. +Instead, it knows the index of the hole and the contents of the arguments of the splice. +We can see this transformation in the following example where a hole is represented by `<< idx; holeType; args* >>`. + +```scala + ${ ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) } +// becomes + << 0; Double; x, n >> +``` + +As this was the first hole it has index 0. +The hole type is `Double`, which needs to be remembered now that we cannot infer it from the contents of the splice. +The arguments of the splice are `x` and `n`; note that they do not require quoting because they were moved out of the splice. + +References to healed types are handled in a similar way. +Consider the `emptyList` example, which shows the type aliases that are inserted into the quote. +```scala +'{ List.empty[T] } +// type healed to +'{ type U = t.Underlying; List.empty[U] } +``` +Instead of replacing a splice, we replace the `t.Underlying` type with a type hole. +The type hole is represented by `<< idx; bounds >>`. +```scala +'{ type U = << 0; Nothing..Any >>; List.empty[U] } +``` +Here, the bounds of `Nothing..Any` are the bounds of the original `T` type. +The types of a `Type.of` are transformed in the same way. + + +With these transformations, the contents of the quote or `Type.of` are guaranteed to be closed and therefore can be pickled. 
+The AST is pickled into TASTy, which is a sequence of bytes. +This sequence of bytes needs to be instantiated in the bytecode, but unfortunately it cannot be dumped into the classfile as bytes. +To reify it we encode the bytes into a Java `String`. +In the following examples we display this encoding in human readable form with the fictitious |tasty"..."| string literal. + +```scala +// pickled AST bytes encoded in a base64 string +tasty""" + val (x, n): (Double, Int) = (5, 2) + << 0; Double; x, n >> * << 1; Double; n >> +""" +// or +tasty""" + type U = << 0; Nothing..Any; >> + List.empty[U] +""" +``` +The contents of a quote or `Type.of` are not always pickled. +In some cases it is better to generate equivalent (smaller and/or faster) code that will compute the expression. +Literal values are compiled into a call to `Expr()` using the implementation of `ToExpr` to create the quoted expression. +This is currently performed only on literal values, but can be extended to any value for which we have a `ToExpr` defined in the standard library. +Similarly, for non-generic types we can use their respective `java.lang.Class` and convert them into a `Type` using a primitive operation `typeConstructorOf` defined in the reflection API. + +##### Unpickling + +Now that we have seen how a quote is pickled, we can look at how to unpickle it. +We will continue with the previous example. + +Holes were used to replace the splices in the quote. +When we perform this transformation we also need to remember the lambdas from the splices and their hole index. +When unpickling a hole, the corresponding splice lambda will be used to compute the contents of the hole. +The lambda will receive as parameters quoted versions of the arguments of the hole. 
+For example to compute the contents of `<< 0; Double; x, n >>` we will evaluate the following code
+
+```scala
+  ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n)
+```
+
+The evaluation is not as trivial as it looks, because the lambda comes from compiled code and the rest is code that must be interpreted.
+We put the AST of `x` and `n` into `Expr` objects to simulate the quotes and then we use Java Reflection to call the `apply` method.
+
+We may have many holes in a quote and therefore as many lambdas.
+To avoid the instantiation of many lambdas, we can join them together into a single lambda.
+Apart from the list of arguments, this lambda will also take the index of the hole that is being evaluated.
+It will perform a switch match on the index and call the corresponding lambda in each branch.
+Each branch will also extract the arguments depending on the definition of the lambda.
+The application of the original lambdas are beta-reduced to avoid extra overhead.
+
+```scala
+(idx: Int, args: Seq[Any]) =>
+  idx match
+    case 0 => // for << 0; Double; x, n >>
+      val x = args(0).asInstanceOf[Expr[Double]]
+      val n = args(1).asInstanceOf[Expr[Int]]
+      powerCode(x, n)
+    case 1 => // for << 1; Double; n >>
+      val n = args(0).asInstanceOf[Expr[Int]]
+      powerCode('{2}, n)
+```
+
+This is similar to what we do for splices: when we replace the type aliases with holes, we keep track of the index of the hole.
+Instead of lambdas, we will have a list of references to instances of `Type`.
+From the following example we would extract `t`, `u`, ... .
+
+```scala
+  '{ type T1 = t1.Underlying; type Tn = tn.Underlying; ... }
+// with holes
+  '{ type T1 = << 0; ... >>; type Tn = << n-1; ... >>; ... }
+```
+
+As the type holes are at the start of the quote, they will have the first `N` indices.
+This implies that we can place the references in a sequence `Seq(t, u, ...)` where the index in the sequence is the same as the hole index.
+
+Lastly, the quote itself is replaced by a call to `QuoteUnpickler.unpickleExpr` which will unpickle the AST, evaluate the holes, i.e., splices, and wrap the resulting AST in an `Expr[Int]`.
+This method takes the pickled |tasty"..."|, the types and the hole lambda.
+Similarly, `Type.of` is replaced with a call to `QuoteUnpickler.unpickleType` but only receives the pickled |tasty"..."| and the types.
+Because `QuoteUnpickler` is part of the self-type of the `Quotes` class, we have to cast the instance but know that this cast will always succeed.
+
+```scala
+quotes.asInstanceOf[runtime.QuoteUnpickler].unpickleExpr[T](
+  pickled = tasty"...",
+  types = Seq(...),
+  holes = (idx: Int, args: Seq[Any]) => idx match ...
+)
+```
+
+[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370)
+[^2]: [Multi-stage programming with generative and analytical macros](https://dl.acm.org/doi/10.1145/3486609.3487203).
+[^3]: In quotes, identifiers starting with `$` must be surrounded by backticks (`` `$` ``). For example `$conforms` from `scala.Predef`.
diff --git a/docs/_spec/TODOreference/metaprogramming/macros.md b/docs/_spec/TODOreference/metaprogramming/macros.md
new file mode 100644
index 000000000000..e39f6f1022b8
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/macros.md
@@ -0,0 +1,621 @@
+---
+layout: doc-page
+title: "Macros"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros.html
+---
+
+> When developing macros enable `-Xcheck-macros` scalac option flag to have extra runtime checks.
+
+## Multi-Staging
+
+#### Quoted expressions
+Multi-stage programming in Scala 3 uses quotes `'{..}` to delay, i.e., stage, execution of code and splices `${..}` to evaluate and insert code into quotes.
+Quoted expressions are typed as `Expr[T]` with a covariant type parameter `T`.
+It is easy to write statically safe code generators with these two concepts.
+The following example shows a naive implementation of the $x^n$ mathematical operation.
+
+```scala
+import scala.quoted.*
+def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  if n == 0 then '{ 1.0 }
+  else if n == 1 then x
+  else '{ $x * ${ unrolledPowerCode(x, n-1) } }
+```
+
+```scala
+'{
+  val x = ...
+  ${ unrolledPowerCode('{x}, 3) } // evaluates to: x * x * x
+}
+```
+
+Quotes and splices are duals of each other.
+For an arbitrary expression `x` of type `T` we have `${'{x}} = x` and for an arbitrary expression `e` of type `Expr[T]` we have `'{${e}} = e`.
+
+#### Abstract types
+Quotes can handle generic and abstract types using the type class `Type[T]`.
+A quote that refers to a generic or abstract type `T` requires a given `Type[T]` to be provided in the implicit scope.
+The following examples show how `T` is annotated with a context bound (`: Type`) to provide an implicit `Type[T]`, or the equivalent `using Type[T]` parameter.
+
+```scala
+import scala.quoted.*
+def singletonListExpr[T: Type](x: Expr[T])(using Quotes): Expr[List[T]] =
+  '{ List[T]($x) } // generic T used within a quote
+
+def emptyListExpr[T](using Type[T], Quotes): Expr[List[T]] =
+  '{ List.empty[T] } // generic T used within a quote
+```
+
+If no other instance is found, the default `Type.of[T]` is used.
+The following example implicitly uses `Type.of[String]` and `Type.of[Option[U]]`.
+```scala
+val list1: Expr[List[String]] =
+  singletonListExpr('{"hello"}) // requires a given `Type[String]`
+val list0: Expr[List[Option[U]]] =
+  emptyListExpr[Option[U]] // requires a given `Type[Option[U]]`
+```
+
+
+The `Type.of[T]` method is a primitive operation that the compiler will handle specially.
+It will provide the implicit if the type `T` is statically known, or if `T` contains some other types `Ui` for which we have an implicit `Type[Ui]`.
+In the example, `Type.of[String]` has a statically known type and `Type.of[Option[U]]` requires an implicit `Type[U]` in scope.
+
+#### Quote context
+We also track the current quotation context using a given `Quotes` instance.
+To create a quote `'{..}` we require a given `Quotes` context, which should be passed as a contextual parameter `(using Quotes)` to the function.
+Each splice will provide a new `Quotes` context within the scope of the splice.
+Therefore quotes and splices can be seen as methods with the following signatures, but with special semantics.
+```scala
+def '[T](x: T): Quotes ?=> Expr[T] // def '[T](x: T)(using Quotes): Expr[T]
+
+def $[T](x: Quotes ?=> Expr[T]): T
+```
+
+The lambda with a question mark `?=>` is a contextual function; it is a lambda that takes its argument implicitly and provides it implicitly in the implementation of the lambda.
+`Quotes` are used for a variety of purposes that will be mentioned when covering those topics.
+
+## Quoted Values
+
+#### Lifting
+While it is not possible to use cross-stage persistence of local variables, it is possible to lift them to the next stage.
+To this end, we provide the `Expr.apply` method, which can take a value and lift it into a quoted representation of the value.
+
+```scala
+val expr1plus1: Expr[Int] = '{ 1 + 1 }
+
+val expr2: Expr[Int] = Expr(1 + 1) // lift 2 into '{ 2 }
+```
+
+While it looks type wise similar to `'{ 1 + 1 }`, the semantics of `Expr(1 + 1)` are quite different.
+`Expr(1 + 1)` will not stage or delay any computation; the argument is evaluated to a value and then lifted into a quote.
+The quote will contain code that will create a copy of this value in the next stage.
+`Expr` is polymorphic and user-extensible via the `ToExpr` type class.
+
+```scala
+trait ToExpr[T]:
+  def apply(x: T)(using Quotes): Expr[T]
+```
+
+We can implement a `ToExpr` using a `given` definition that will add the definition to the implicits in scope.
+In the following example we show how to implement a `ToExpr[Option[T]]` for any liftable type `T`.
+
+```scala
+given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with
+  def apply(opt: Option[T])(using Quotes): Expr[Option[T]] =
+    opt match
+      case Some(x) => '{ Some[T]( ${Expr(x)} ) }
+      case None => '{ None }
+```
+
+The `ToExpr` for primitive types must be implemented as primitive operations in the system.
+In our case, we use the reflection API to implement them.
+
+#### Extracting values from quotes
+To be able to generate optimized code using the method `unrolledPowerCode`, the macro implementation `powerCode` needs to first
+determine whether the argument passed as parameter `n` is a known constant value.
+This can be achieved via _unlifting_ using the `Expr.unapply` extractor from our library implementation, which will only match if `n` is a quoted constant and extracts its value.
+
+```scala
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  n match
+    case Expr(m) => // it is a constant: unlift code n='{m} into number m
+      unrolledPowerCode(x, m)
+    case _ => // not known: call power at run-time
+      '{ power($x, $n) }
+```
+
+Alternatively, the `n.value` method can be used to get an `Option[Int]` with the value or `n.valueOrAbort` to get the value directly.
+```scala
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  // emits an error message if `n` is not a constant
+  unrolledPowerCode(x, n.valueOrAbort)
+```
+
+`Expr.unapply` and all variants of `value` are polymorphic and user-extensible via a given `FromExpr` type class.
+
+```scala
+trait FromExpr[T]:
+  def unapply(x: Expr[T])(using Quotes): Option[T]
+```
+
+We can use `given` definitions to implement the `FromExpr` as we did for `ToExpr`.
+The `FromExpr` for primitive types must be implemented as primitive operations in the system.
+In our case, we use the reflection API to implement them.
+To implement `FromExpr` for non-primitive types we use quote pattern matching (for example `OptionFromExpr`).
+
+
+## Macros and Multi-Stage Programming
+
+The system supports multi-stage macros and run-time multi-stage programming using the same quotation abstractions.
+
+### Multi-Stage Macros
+
+#### Macros
+We can generalize the splicing abstraction to express macros.
+A macro consists of a top-level splice that is not nested in any quote.
+Conceptually, the contents of the splice are evaluated one stage earlier than the program.
+In other words, the contents are evaluated while compiling the program. The generated code resulting from the macro replaces the splice in the program.
+
+```scala
+def power2(x: Double): Double =
+  ${ unrolledPowerCode('x, 2) } // x * x
+```
+
+#### Inline macros
+Since using the splices in the middle of a program is not as ergonomic as calling a function, we hide the staging mechanism from end-users of macros. We have a uniform way of calling macros and normal functions.
+For this, _we restrict the use of top-level splices to only appear in inline methods_[^1][^2].
+
+```scala
+// inline macro definition
+inline def powerMacro(x: Double, inline n: Int): Double =
+  ${ powerCode('x, 'n) }
+
+// user code
+def power2(x: Double): Double =
+  powerMacro(x, 2) // x * x
+```
+
+The evaluation of the macro will only happen when the code is inlined into `power2`.
+When inlined, the code is equivalent to the previous definition of `power2`.
+A consequence of using inline methods is that none of the arguments nor the return type of the macro will have to mention the `Expr` types; this hides all aspects of metaprogramming from the end-users.
+
+#### Avoiding a complete interpreter
+When evaluating a top-level splice, the compiler needs to interpret the code that is within the splice.
+Providing an interpreter for the entire language is quite tricky, and it is even more challenging to make that interpreter run efficiently.
+To avoid needing a complete interpreter, we can impose the following restrictions on splices to simplify the evaluation of the code in top-level splices. + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are literal constants, quoted expressions (parameters), calls to `Type.of` for type parameters and a reference to `Quotes`. + +In particular, these restrictions disallow the use of splices in top-level splices. +Such a splice would require several stages of interpretation which would be unnecessarily inefficient. + +#### Compilation stages +The macro implementation (i.e., the method called in the top-level splice) can come from any pre-compiled library. +This provides a clear difference between the stages of the compilation process. +Consider the following 3 source files defined in distinct libraries. +```scala +// Macro.scala +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = ... +inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } +``` + +```scala +// Lib.scala (depends on Macro.scala) +def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) +``` + +```scala +// App.scala (depends on Lib.scala) +@main def app() = power2(3.14) +``` +One way to syntactically visualize this is to put the application in a quote that delays the compilation of the application. +Then the application dependencies can be placed in an outer quote that contains the quoted application, and we repeat this recursively for dependencies of dependencies. + +```scala +'{ // macro library (compilation stage 1) + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + ... 
+ inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + '{ // library using macros (compilation stage 2) + def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) + '{ power2(3.14) /* app (compilation stage 3) */ } + } +} +``` + +To make the system more versatile, we allow calling macros in the project where it is defined, with some restrictions. +For example, to compile `Macro.scala` and `Lib.scala` together in the same library. +To this end, we do not follow the simpler syntactic model and rely on semantic information from the source files. +When compiling a source, if we detect a call to a macro that is not compiled yet, we delay the compilation of this source to the following compilation stage. +In the example, we would delay the compilation of `Lib.scala` because it contains a compile-time call to `powerCode`. +Compilation stages are repeated until all sources are compiled, or no progress can be made. +If no progress is made, there was a cyclic dependency between the definition and the use of the macro. +We also need to detect if at runtime the macro depends on sources that have not been compiled yet. +These are detected by executing the macro and checking for JVM linking errors to classes that have not been compiled yet. + +### Run-Time Multi-Stage Programming + +See [Run-Time Multi-Stage Programming](./staging.md) + +## Safety + +Multi-stage programming is by design statically safe and cross-stage safe. + +### Static Safety + +#### Hygiene +All identifier names are interpreted as symbolic references to the corresponding variable in the context of the quote. +Therefore, while evaluating the quote, it is not possible to accidentally rebind a reference to a new variable with the same textual name. + +#### Well-typed +If a quote is well typed, then the generated code is well typed. +This is a simple consequence of tracking the type of each expression. 
+An `Expr[T]` can only be created from a quote that contains an expression of type `T`.
+Conversely, an `Expr[T]` can only be spliced in a location that expects a type `T`.
+As mentioned before, `Expr` is covariant in its type parameter.
+This means that an `Expr[T]` can contain an expression of a subtype of `T`.
+When spliced in a location that expects a type `T`, these expressions also have a valid type.
+
+### Cross-Stage Safety
+
+#### Level consistency
+We define the _staging level_ of some code as the number of quotes minus the number of splices surrounding said code.
+Local variables must be defined and used in the same staging level.
+
+It is never possible to access a local variable from a lower staging level as it does not yet exist.
+
+```scala
+def badPower(x: Double, n: Int): Double =
+  ${ unrolledPowerCode('x, n) } // error: value of `n` not known yet
+```
+
+
+In the context of macros and _cross-platform portability_, that is,
+macros compiled on one machine but potentially executed on another,
+we cannot support cross-stage persistence of local variables.
+Therefore, local variables can only be accessed at precisely the same staging level in our system.
+
+```scala
+def badPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  // error: `n` potentially not available in the next execution environment
+  '{ power($x, n) }
+```
+
+
+The rules are slightly different for global definitions, such as `unrolledPowerCode`.
+It is possible to generate code that contains a reference to a _global_ definition such as in `'{ power(2, 4) }`.
+This is a limited form of cross-stage persistence that does not impede cross-platform portability, where we refer to the already compiled code for `power`.
+Each compilation step will lower the staging level by one while keeping global definitions.
+In consequence, we can refer to compiled definitions in macros such as `unrolledPowerCode` in `${ unrolledPowerCode('x, 2) }`.
+
+We can summarize level consistency in two rules:
+ * Local variables can be used only at the same staging level as their definition
+ * Global variables can be used at any staging level
+
+
+#### Type consistency
+As Scala uses type erasure, generic types will be erased at run-time and hence in any following stage.
+To ensure any quoted expression that refers to a generic type `T` does not lose the information it needs, we require a given `Type[T]` in scope.
+The `Type[T]` will carry over the non-erased representation of the type into the next phase.
+Therefore any generic type used at a higher staging level than its definition will require its `Type`.
+
+#### Scope extrusion
+Within the contents of a splice, it is possible to have a quote that refers to a local variable defined in the outer quote.
+If this quote is used within the splice, the variable will be in scope.
+However, if the quote is somehow _extruded_ outside the splice, then variables might not be in scope anymore.
+Quoted expressions can be extruded using side effects such as mutable state and exceptions.
+The following example shows how a quote can be extruded using mutable state.
+```scala
+var x: Expr[T] = null
+'{ (y: T) => ${ x = 'y; 1 } }
+x // has value '{y} but y is not in scope
+```
+
+A second way a variable can be extruded is through the `run` method.
+If `run` consumes a quoted variable reference, it will not be in scope anymore.
+The result will reference a variable that is defined in the next stage.
+
+```scala
+'{ (x: Int) => ${ run('x); ... } }
+// evaluates to: '{ (x: Int) => ${ x; ... } }
+```
+
+To catch both scope extrusion scenarios, our system restricts the use of quotes by only allowing a quote to be spliced if it was not extruded from a splice scope.
+Unlike level consistency, this is checked at run-time[^4] rather than compile-time to avoid making the static type system too complicated.
+ +Each `Quotes` instance contains a unique scope identifier and refers to its parent scope, forming a stack of identifiers. +The parent of the scope of a `Quotes` is the scope of the `Quotes` used to create the enclosing quote. +Top-level splices and `run` create new scope stacks. +Every `Expr` knows in which scope it was created. +When it is spliced, we check that the quote scope is either the same as the splice scope, or a parent scope thereof. + + +## Staged Lambdas + +When staging programs in a functional language there are two fundamental abstractions: a staged lambda `Expr[T => U]` and a staging lambda `Expr[T] => Expr[U]`. +The first is a function that will exist in the next stage, whereas the second is a function that exists in the current stage. +It is often convenient to have a mechanism to go from `Expr[T => U]` to `Expr[T] => Expr[U]` and vice versa. + +```scala +def later[T: Type, U: Type](f: Expr[T] => Expr[U]): Expr[T => U] = + '{ (x: T) => ${ f('x) } } + +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => '{ $f($x) } +``` + +Both conversions can be performed out of the box with quotes and splices. +But if `f` is a known lambda function, `'{ $f($x) }` will not beta-reduce the lambda in place. +This optimization is performed in a later phase of the compiler. +Not reducing the application immediately can simplify analysis of generated code. +Nevertheless, it is possible to beta-reduce the lambda in place using the `Expr.betaReduce` method. + +```scala +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => Expr.betaReduce('{ $f($x) }) +``` + +The `betaReduce` method will beta-reduce the outermost application of the expression if possible (regardless of arity). +If it is not possible to beta-reduce the expression, then it will return the original expression. 
+ +## Staged Constructors +To create new class instances in a later stage, we can create them using factory methods (usually `apply` methods of an `object`), or we can instantiate them with a `new`. +For example, we can write `Some(1)` or `new Some(1)`, creating the same value. +In Scala 3, using the factory method call notation will fall back to a `new` if no `apply` method is found. +We follow the usual staging rules when calling a factory method. +Similarly, when we use a `new C`, the constructor of `C` is implicitly called, which also follows the usual staging rules. +Therefore for an arbitrary known class `C`, we can use both `'{ C(...) }` or `'{ new C(...) }` as constructors. + +## Staged Classes +Quoted code can contain any valid expression including local class definitions. +This allows the creation of new classes with specialized implementations. +For example, we can implement a new version of `Runnable` that will perform some optimized operation. +```scala +def mkRunnable(x: Int)(using Quotes): Expr[Runnable] = '{ + class MyRunnable extends Runnable: + def run(): Unit = ... // generate some custom code that uses `x` + new MyRunnable +} +``` + +The quoted class is a local class and its type cannot escape the enclosing quote. +The class must be used inside the quote or an instance of it can be returned using a known interface (`Runnable` in this case). + +## Quote Pattern Matching + +It is sometimes necessary to analyze the structure of the code or decompose the code into its sub-expressions. +A classic example is an embedded DSL, where a macro knows a set of definitions that it can reinterpret while compiling the code (for instance, to perform optimizations). +In the following example, we extend our previous implementation of `powCode` to look into `x` to perform further optimizations. 
+ +```scala +def fusedPowCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + x match + case '{ power($y, $m) } => // we have (y^m)^n + fusedPowCode(y, '{ $n * $m }) // generate code for y^(n*m) + case _ => + '{ power($x, $n) } +``` + + +#### Sub-patterns + +In quoted patterns, the `$` binds the sub-expression to an expression `Expr` that can be used in that `case` branch. +The contents of `${..}` in a quote pattern are regular Scala patterns. +For example, we can use the `Expr(_)` pattern within the `${..}` to only match if it is a known value and extract it. + +```scala +def fusedUnrolledPowCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + x match + case '{ power($y, ${Expr(m)}) } => // we have (y^m)^n + fusedUnrolledPowCode(y, n * m) // generate code for y * ... * y + case _ => // ( n*m times ) + unrolledPowerCode(x, n) +``` + +These value extraction sub-patterns can be polymorphic using an instance of `FromExpr`. +In the following example, we show the implementation of `OptionFromExpr` which internally uses the `FromExpr[T]` to extract the value using the `Expr(x)` pattern. + +```scala +given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with + def unapply(x: Expr[Option[T]])(using Quotes): Option[Option[T]] = + x match + case '{ Some( ${Expr(x)} ) } => Some(Some(x)) + case '{ None } => Some(None) + case _ => None +``` + + + +#### Closed patterns +Patterns may contain two kinds of references: global references such as the call to the `power` method in `'{ power(...) }`, or references to bindings defined in the pattern such as `x` in `case '{ (x: Int) => x }`. +When extracting an expression from a quote, we need to ensure that we do not extrude any variable from the scope where it is defined. + +```scala +'{ (x: Int) => x + 1 } match + case '{ (y: Int) => $z } => + // should not match, otherwise: z = '{ x + 1 } +``` + +In this example, we see that the pattern should not match. 
+Otherwise, any use of the expression `z` would contain an unbound reference to `x`. +To avoid any such extrusion, we only match on a `${..}` if its expression is closed under the definitions within the pattern. +Therefore, the pattern will not match if the expression is not closed. + +#### HOAS patterns +To allow extracting expressions that may contain extruded references we offer a _higher-order abstract syntax_ (HOAS) pattern `$f(y)` (or `$f(y1,...,yn)`). +This pattern will eta-expand the sub-expression with respect to `y` and bind it to `f`. +The lambda arguments will replace the variables that might have been extruded. + +```scala +'{ ((x: Int) => x + 1).apply(2) } match + case '{ ((y: Int) => $f(y)).apply($z: Int) } => + // f may contain references to `x` (replaced by `$y`) + // f = (y: Expr[Int]) => '{ $y + 1 } + f(z) // generates '{ 2 + 1 } +``` + + +A HOAS pattern `$x(y1,...,yn)` will only match the expression if it does not contain references to variables defined in the pattern that are not in the set `y1,...,yn`. +In other words, the pattern will match if the expression only contains references to variables defined in the pattern that are in `y1,...,yn`. +Note that the HOAS patterns `$x()` are semantically equivalent to closed patterns `$x`. + + +#### Type variables + +Expressions may contain types that are not statically known. +For example, an `Expr[List[Int]]` may contain `list.map(_.toInt)` where `list` is a `List` of some type. +To cover all the possible cases we would need to explicitly match `list` on all possible types (`List[Int]`, `List[Int => Int]`, ...). +This is an infinite set of types and therefore pattern cases. +Even if we would know all possible types that a specific program could use, we may still end up with an unmanageable number of cases. +To overcome this, we introduce type variables in quoted patterns, which will match any type. 
+
+In the following example, we show how type variables `t` and `u` match all possible pairs of consecutive calls to `map` on lists.
+In the quoted patterns, types named with lower cases are identified as type variables.
+This follows the same notation as type variables used in normal patterns.
+```scala
+def fuseMapCode(x: Expr[List[Int]]): Expr[List[Int]] =
+  x match
+    case '{ ($ls: List[t]).map[u]($f).map[Int]($g) } =>
+      '{ $ls.map($g.compose($f)) }
+    ...
+
+fuseMapCode('{ List(1.2).map(f).map(g) }) // '{ List(1.2).map(g.compose(f)) }
+fuseMapCode('{ List('a').map(h).map(i) }) // '{ List('a').map(i.compose(h)) }
+```
+Variables `f` and `g` are inferred to be of type `Expr[t => u]` and `Expr[u => Int]` respectively.
+Subsequently, we can infer `$g.compose($f)` to be of type `Expr[t => Int]` which is the type of the argument of `$ls.map(..)`.
+
+Type variables are abstract types that will be erased; this implies that to reference them in the second quote we need a given `Type[t]` and `Type[u]`.
+The quoted pattern will implicitly provide those given types.
+At run-time, when the pattern matches, the type of `t` and `u` will be known, and the `Type[t]` and `Type[u]` will contain the precise types in the expression.
+
+As `Expr` is covariant, the statically known type of the expression might not be the actual type.
+Type variables can also be used to recover the precise type of the expression.
+```scala
+def let(x: Expr[Any])(using Quotes): Expr[Any] =
+  x match
+    case '{ $x: t } =>
+      '{ val y: t = $x; y }
+
+let('{1}) // will return a `Expr[Any]` that contains an `Expr[Int]`
+```
+
+While we can define the type variable in the middle of the pattern, their normal form is to define them as a `type` with a lower case name at the start of the pattern.
+We use the Scala backquote `` `t` `` naming convention which interprets the string within the backquote as a literal name identifier.
+This is typically used when we have names that contain special characters that are not allowed for normal Scala identifiers. +But we use it to explicitly state that this is a reference to that name and not the introduction of a new variable. +```scala + case '{ type t; $x: `t` } => +``` +This is a bit more verbose but has some expressivity advantages such as allowing to define bounds on the variables and be able to refer to them several times in any scope of the pattern. + +```scala + case '{ type t >: List[Int] <: Seq[Int]; $x: `t` } => + case '{ type t; $x: (`t`, `t`) } => +``` + + +#### Type patterns +It is possible to only have a type and no expression of that type. +To be able to inspect a type, we introduce quoted type pattern `case '[..] =>`. +It works the same way as a quoted pattern but is restricted to contain a type. +Type variables can be used in quoted type patterns to extract a type. + +```scala +def empty[T: Type]: Expr[T] = + Type.of[T] match + case '[String] => '{ "" } + case '[List[t]] => '{ List.empty[t] } + ... +``` + +`Type.of[T]` is used to summon the given instance of `Type[T]` in scope, it is equivalent to `summon[Type[T]]`. + +#### Type testing and casting +It is important to note that instance checks and casts on `Expr`, such as `isInstanceOf[Expr[T]]` and `asInstanceOf[Expr[T]]`, will only check if the instance is of the class `Expr` but will not be able to check the `T` argument. +These cases will issue a warning at compile-time, but if they are ignored, they can result in unexpected behavior. + +These operations can be supported correctly in the system. +For a simple type test it is possible to use the `isExprOf[T]` method of `Expr` to check if it is an instance of that type. +Similarly, it is possible to use `asExprOf[T]` to cast an expression to a given type. +These operations use a given `Type[T]` to work around type erasure. 
+ + +## Sub-Expression Transformation + +The system provides a mechanism to transform all sub-expressions of an expression. +This is useful when the sub-expressions we want to transform are deep in the expression. +It is also necessary if the expression contains sub-expressions that cannot be matched using quoted patterns (such as local class definitions). + +```scala +trait ExprMap: + def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] + def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = + ... +``` + +Users can extend the `ExprMap` trait and implement the `transform` method. +This interface is flexible and can implement top-down, bottom-up, or other transformations. + +```scala +object OptimizeIdentity extends ExprMap: + def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = + transformChildren(e) match // bottom-up transformation + case '{ identity($x) } => x + case _ => e +``` + +The `transformChildren` method is implemented as a primitive that knows how to reach all the direct sub-expressions and calls `transform` on each one. +The type passed to `transform` is the expected type of this sub-expression in its expression. +For example while transforming `Some(1)` in `'{ val x: Option[Int] = Some(1); ...}` the type will be `Option[Int]` and not `Some[Int]`. +This implies that we can safely transform `Some(1)` into `None`. + +## Staged Implicit Summoning +When summoning implicit arguments using `summon`, we will find the given instances in the current scope. +It is possible to use `summon` to get staged implicit arguments by explicitly staging them first. +In the following example, we can pass an implicit `Ordering[T]` in a macro as an `Expr[Ordering[T]]` to its implementation. +Then we can splice it and give it implicitly in the next stage. 
+ +```scala +inline def treeSetFor[T](using ord: Ordering[T]): Set[T] = + ${ setExpr[T](using 'ord) } + +def setExpr[T:Type](using ord: Expr[Ordering[T]])(using Quotes): Expr[Set[T]] = + '{ given Ordering[T] = $ord; new TreeSet[T]() } +``` + +We pass it as an implicit `Expr[Ordering[T]]` because there might be intermediate methods that can pass it along implicitly. + +An alternative is to summon implicit values in the scope where the macro is invoked. +Using the `Expr.summon` method we get an optional expression containing the implicit instance. +This provides the ability to search for implicit instances conditionally. + +```scala +def summon[T: Type](using Quotes): Option[Expr[T]] +``` + +```scala +inline def setFor[T]: Set[T] = + ${ setForExpr[T] } + +def setForExpr[T: Type]()(using Quotes): Expr[Set[T]] = + Expr.summon[Ordering[T]] match + case Some(ord) => + '{ new TreeSet[T]()($ord) } + case _ => + '{ new HashSet[T] } +``` + +## More details + +[More details](./macros-spec.md) + + +[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370) +[^2]: [Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) +[^3]: Implemented in the Scala 3 Dotty project https://github.com/lampepfl/dotty. sbt library dependency `"org.scala-lang" %% "scala3-staging" % scalaVersion.value` +[^4]: Using the `-Xcheck-macros` compiler flag diff --git a/docs/_spec/TODOreference/metaprogramming/metaprogramming.md b/docs/_spec/TODOreference/metaprogramming/metaprogramming.md new file mode 100644 index 000000000000..3bce2d7c922e --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/metaprogramming.md @@ -0,0 +1,47 @@ +--- +layout: index +title: "Metaprogramming" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming.html +--- + +The following pages introduce the redesign of metaprogramming in Scala. They +introduce the following fundamental facilities: + +1. 
[`inline`](./inline.md) is a new modifier that guarantees that + a definition will be inlined at the point of use. The primary motivation + behind inline is to reduce the overhead behind function calls and access to + values. The expansion will be performed by the Scala compiler during the + `Typer` compiler phase. As opposed to inlining in some other ecosystems, + inlining in Scala is not merely a request to the compiler but is a + _command_. The reason is that inlining in Scala can drive other compile-time + operations, like inline pattern matching (enabling type-level + programming), macros (enabling compile-time, generative, metaprogramming) and + runtime code generation (multi-stage programming). + +2. [Compile-time ops](./compiletime-ops.md) are helper definitions in the + standard library that provide support for compile-time operations over values and types. + +3. [Macros](./macros.md) are built on two well-known fundamental + operations: quotation and splicing. Quotation converts program code to + data, specifically, a (tree-like) representation of this code. It is + expressed as `'{...}` for expressions and as `'[...]` for types. Splicing, + expressed as `${ ... }`, goes the other way: it converts a program's representation + to program code. Together with `inline`, these two abstractions allow + to construct program code programmatically. + +4. [Runtime Staging](./staging.md) Where macros construct code at _compile-time_, + staging lets programs construct new code at _runtime_. That way, + code generation can depend not only on static data but also on data available at runtime. This splits the evaluation of the program in two or more phases or ... + stages. Consequently, this method of generative programming is called "Multi-Stage Programming". Staging is built on the same foundations as macros. It uses + quotes and splices, but leaves out `inline`. + +5. [Reflection](./reflection.md) Quotations are a "black-box" + representation of code. 
They can be parameterized and composed using + splices, but their structure cannot be analyzed from the outside. TASTy + reflection gives a way to analyze code structure by partly revealing the representation type of a piece of code in a standard API. The representation + type is a form of typed abstract syntax tree, which gives rise to the `TASTy` + moniker. + +6. [TASTy Inspection](./tasty-inspect.md) Typed abstract syntax trees are serialized + in a custom compressed binary format stored in `.tasty` files. TASTy inspection allows + to load these files and analyze their content's tree structure. diff --git a/docs/_spec/TODOreference/metaprogramming/reflection.md b/docs/_spec/TODOreference/metaprogramming/reflection.md new file mode 100644 index 000000000000..b2d492657a4e --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/reflection.md @@ -0,0 +1,131 @@ +--- +layout: doc-page +title: "Reflection" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/reflection.html +--- + +Reflection enables inspection and construction of Typed Abstract Syntax Trees +(Typed-AST). It may be used on quoted expressions (`quoted.Expr`) and quoted +types (`quoted.Type`) from [Macros](./macros.md) or on full TASTy files. + +If you are writing macros, please first read [Macros](./macros.md). +You may find all you need without using quote reflection. + +## API: From quotes and splices to TASTy reflect trees and back + +With `quoted.Expr` and `quoted.Type` we can compute code but also analyze code +by inspecting the ASTs. [Macros](./macros.md) provide the guarantee that the +generation of code will be type-correct. Using quote reflection will break these +guarantees and may fail at macro expansion time, hence additional explicit +checks must be done. + +To provide reflection capabilities in macros we need to add an implicit parameter +of type `scala.quoted.Quotes` and import `quotes.reflect.*` from it in the scope +where it is used. 
+ +```scala +import scala.quoted.* + +inline def natConst(inline x: Int): Int = ${natConstImpl('{x})} + +def natConstImpl(x: Expr[Int])(using Quotes): Expr[Int] = + import quotes.reflect.* + ... +``` + +### Extractors + +`import quotes.reflect.*` will provide all extractors and methods on `quotes.reflect.Tree`s. +For example the `Literal(_)` extractor used below. + +```scala +def natConstImpl(x: Expr[Int])(using Quotes): Expr[Int] = + import quotes.reflect.* + val tree: Term = x.asTerm + tree match + case Inlined(_, _, Literal(IntConstant(n))) => + if n <= 0 then + report.error("Parameter must be natural number") + '{0} + else + tree.asExprOf[Int] + case _ => + report.error("Parameter must be a known constant") + '{0} +``` + +We can easily know which extractors are needed using `Printer.TreeStructure.show`, +which returns the string representation the structure of the tree. Other printers +can also be found in the `Printer` module. + +```scala +tree.show(using Printer.TreeStructure) +// or +Printer.TreeStructure.show(tree) +``` + +The methods `quotes.reflect.Term.{asExpr, asExprOf}` provide a way to go back to +a `quoted.Expr`. Note that `asExpr` returns a `Expr[Any]`. On the other hand +`asExprOf[T]` returns a `Expr[T]`, if the type does not conform to it an exception +will be thrown at runtime. + +### Positions + +The `Position` in the context provides an `ofMacroExpansion` value. It corresponds +to the expansion site for macros. The macro authors can obtain various information +about that expansion site. The example below shows how we can obtain position +information such as the start line, the end line or even the source code at the +expansion point. 
+ +```scala +def macroImpl()(quotes: Quotes): Expr[Unit] = + import quotes.reflect.* + val pos = Position.ofMacroExpansion + + val path = pos.sourceFile.jpath.toString + val start = pos.start + val end = pos.end + val startLine = pos.startLine + val endLine = pos.endLine + val startColumn = pos.startColumn + val endColumn = pos.endColumn + val sourceCode = pos.sourceCode + ... +``` + +### Tree Utilities + +`quotes.reflect` contains three facilities for tree traversal and +transformation. + +`TreeAccumulator` ties the knot of a traversal. By calling `foldOver(x, tree)(owner)` +we can dive into the `tree` node and start accumulating values of type `X` (e.g., +of type `List[Symbol]` if we want to collect symbols). The code below, for +example, collects the `val` definitions in the tree. + +```scala +def collectPatternVariables(tree: Tree)(using ctx: Context): List[Symbol] = + val acc = new TreeAccumulator[List[Symbol]]: + def foldTree(syms: List[Symbol], tree: Tree)(owner: Symbol): List[Symbol] = tree match + case ValDef(_, _, rhs) => + val newSyms = tree.symbol :: syms + foldTree(newSyms, body)(tree.symbol) + case _ => + foldOverTree(syms, tree)(owner) + acc(Nil, tree) +``` + +A `TreeTraverser` extends a `TreeAccumulator` and performs the same traversal +but without returning any value. Finally, a `TreeMap` performs a transformation. + +#### ValDef.let + +`quotes.reflect.ValDef` also offers a method `let` that allows us to bind the `rhs` (right-hand side) to a `val` and use it in `body`. +Additionally, `lets` binds the given `terms` to names and allows to use them in the `body`. +Their type definitions are shown below: + +```scala +def let(rhs: Term)(body: Ident => Term): Term = ... + +def lets(terms: List[Term])(body: List[Term] => Term): Term = ... 
+``` diff --git a/docs/_spec/TODOreference/metaprogramming/simple-smp.md b/docs/_spec/TODOreference/metaprogramming/simple-smp.md new file mode 100644 index 000000000000..2ba0155ad329 --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/simple-smp.md @@ -0,0 +1,232 @@ +--- +layout: doc-page +title: "The Meta-theory of Symmetric Metaprogramming" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/simple-smp.html +--- + +This note presents a simplified variant of +[principled metaprogramming](./macros.md) +and sketches its soundness proof. The variant treats only dialogues +between two stages. A program can have quotes which can contain +splices (which can contain quotes, which can contain splices, and so +on). Or the program could start with a splice with embedded +quotes. The essential restriction is that (1) a term can contain top-level +quotes or top-level splices, but not both, and (2) quotes cannot appear +directly inside quotes and splices cannot appear directly inside +splices. In other words, the universe is restricted to two phases +only. + +Under this restriction we can simplify the typing rules so that there are +always exactly two environments instead of having a stack of environments. +The variant presented here differs from the full calculus also in that we +replace evaluation contexts with contextual typing rules. While this +is more verbose, it makes it easier to set up the meta theory. 
+ +## Syntax +``` +Terms t ::= x variable + (x: T) => t lambda + t t application + ’t quote + ~t splice + +Simple terms u ::= x | (x: T) => u | u u + +Values v ::= (x: T) => t lambda + ’u quoted value + +Types T ::= A base type + T -> T function type + ’T quoted type +``` +## Operational semantics + +### Evaluation +``` + ((x: T) => t) v --> [x := v]t + + t1 --> t2 + --------------- + t1 t --> t2 t + + t1 --> t2 + --------------- + v t1 --> v t2 + + t1 ==> t2 + ------------- + ’t1 --> ’t2 +``` + +### Splicing +``` + ~’u ==> u + + t1 ==> t2 + ------------------------------- + (x: T) => t1 ==> (x: T) => t2 + + t1 ==> t2 + --------------- + t1 t ==> t2 t + + t1 ==> t2 + --------------- + u t1 ==> u t2 + + t1 --> t2 + ------------- + ~t1 ==> ~t2 + +``` +## Typing Rules + +Typing judgments are of the form `E1 * E2 |- t: T` where `E1, E2` are environments and +`*` is one of `~` and `’`. +``` + x: T in E2 + --------------- + E1 * E2 |- x: T + + + E1 * E2, x: T1 |- t: T2 + -------------------------------- + E1 * E2 |- (x: T1) => t: T -> T2 + + + E1 * E2 |- t1: T2 -> T E1 * E2 |- t2: T2 + ------------------------------------------- + E1 * E2 |- t1 t2: T + + + E2 ’ E1 |- t: T + ----------------- + E1 ~ E2 |- ’t: ’T + + + E2 ~ E1 |- t: ’T + ---------------- + E1 ’ E2 |- ~t: T +``` + +(Curiously, this looks a bit like a Christmas tree). + +## Soundness + +The meta-theory typically requires mutual inductions over two judgments. + +### Progress Theorem + + 1. If `E1 ~ |- t: T` then either `t = v` for some value `v` or `t --> t2` for some term `t2`. + 2. If ` ’ E2 |- t: T` then either `t = u` for some simple term `u` or `t ==> t2` for some term `t2`. + +Proof by structural induction over terms. + +To prove (1): + + - the cases for variables, lambdas and applications are as in [STLC](https://en.wikipedia.org/wiki/Simply_typed_lambda_calculus). + - If `t = ’t2`, then by inversion we have ` ’ E1 |- t2: T2` for some type `T2`. 
+ By the second [induction hypothesis](https://en.wikipedia.org/wiki/Mathematical_induction) (I.H.), we have one of: + - `t2 = u`, hence `’t2` is a value, + - `t2 ==> t3`, hence `’t2 --> ’t3`. + - The case `t = ~t2` is not typable. + +To prove (2): + + - If `t = x` then `t` is a simple term. + - If `t = (x: T) => t2`, then either `t2` is a simple term, in which case `t` is as well. + Or by the second I.H. `t2 ==> t3`, in which case `t ==> (x: T) => t3`. + - If `t = t1 t2` then one of three cases applies: + + - `t1` and `t2` are a simple term, then `t` is as well a simple term. + - `t1` is not a simple term. Then by the second I.H., `t1 ==> t12`, hence `t ==> t12 t2`. + - `t1` is a simple term but `t2` is not. Then by the second I.H. `t2 ==> t22`, hence `t ==> t1 t22`. + + - The case `t = ’t2` is not typable. + - If `t = ~t2` then by inversion we have `E2 ~ |- t2: ’T2`, for some type `T2`. + By the first I.H., we have one of + + - `t2 = v`. Since `t2: ’T2`, we must have `v = ’u`, for some simple term `u`, hence `t = ~’u`. + By quote-splice reduction, `t ==> u`. + - `t2 --> t3`. Then by the context rule for `’t`, `t ==> ’t3`. + + +### Substitution Lemma + + 1. If `E1 ~ E2 |- s: S` and `E1 ~ E2, x: S |- t: T` then `E1 ~ E2 |- [x := s]t: T`. + 2. If `E1 ~ E2 |- s: S` and `E2, x: S ’ E1 |- t: T` then `E2 ’ E1 |- [x := s]t: T`. + +The proofs are by induction on typing derivations for `t`, analogous +to the proof for STL (with (2) a bit simpler than (1) since we do not +need to swap lambda bindings with the bound variable `x`). The +arguments that link the two hypotheses are as follows. + +To prove (1), let `t = ’t1`. Then `T = ’T1` for some type `T1` and the last typing rule is +``` + E2, x: S ’ E1 |- t1: T1 + ------------------------- + E1 ~ E2, x: S |- ’t1: ’T1 +``` +By the second I.H. `E2 ’ E1 |- [x := s]t1: T1`. By typing, `E1 ~ E2 |- ’[x := s]t1: ’T1`. +Since `[x := s]t = [x := s](’t1) = ’[x := s]t1` we get `[x := s]t: ’T1`. + +To prove (2), let `t = ~t1`. 
Then the last typing rule is
+```
+     E1 ~ E2, x: S |- t1: ’T
+     -----------------------
+     E2, x: S ’ E1 |- ~t1: T
+```
+By the first I.H., `E1 ~ E2 |- [x := s]t1: ’T`. By typing, `E2 ’ E1 |- ~[x := s]t1: T`.
+Since `[x := s]t = [x := s](~t1) = ~[x := s]t1` we get `[x := s]t: T`.
+
+
+### Preservation Theorem
+
+ 1. If `E1 ~ E2 |- t1: T` and `t1 --> t2` then `E1 ~ E2 |- t2: T`.
+ 2. If `E1 ’ E2 |- t1: T` and `t1 ==> t2` then `E1 ’ E2 |- t2: T`.
+
+The proof is by structural induction on evaluation derivations. The proof of (1) is analogous
+to the proof for STL, using the substitution lemma for the beta reduction case, with the addition of reduction of quoted terms, which goes as follows:
+
+ - Assume the last rule was
+   ```
+      t1 ==> t2
+    -------------
+    ’t1 --> ’t2
+   ```
+   By inversion of typing rules, we must have `T = ’T1` for some type `T1` such that `t1: T1`.
+   By the second I.H., `t2: T1`, hence `’t2: ’T1`.
+
+
+To prove (2):
+
+ - Assume the last rule was `~’u ==> u`. The typing proof of `~’u` must have the form
+
+   ```
+    E1 ’ E2 |- u: T
+    -----------------
+    E1 ~ E2 |- ’u: ’T
+    -----------------
+    E1 ’ E2 |- ~’u: T
+   ```
+   Hence, `E1 ’ E2 |- u: T`.
+
+ - Assume the last rule was
+   ```
+        t1 ==> t2
+    -------------------------------
+    (x: S) => t1 ==> (x: S) => t2
+   ```
+   By typing inversion, `E1 ’ E2, x: S |- t1: T1` for some type `T1` such that `T = S -> T1`.
+   By the I.H, `t2: T1`. By the typing rule for lambdas the result follows.
+
+ - The context rules for applications are equally straightforward.
+
+ - Assume the last rule was
+   ```
+      t1 ==> t2
+    -------------
+    ~t1 ==> ~t2
+   ```
+   By inversion of typing rules, we must have `t1: ’T`.
+   By the first I.H., `t2: ’T`, hence `~t2: T`.
diff --git a/docs/_spec/TODOreference/metaprogramming/staging.md b/docs/_spec/TODOreference/metaprogramming/staging.md new file mode 100644 index 000000000000..6d9166e8249e --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/staging.md @@ -0,0 +1,121 @@ +--- +layout: doc-page +title: "Runtime Multi-Stage Programming" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/staging.html +--- + +The framework expresses at the same time compile-time metaprogramming and +multi-stage programming. We can think of compile-time metaprogramming as a +two stage compilation process: one that we write the code in top-level splices, +that will be used for code generation (macros) and one that will perform all +necessary evaluations at compile-time and an object program that we will run +as usual. What if we could synthesize code at run-time and offer one extra stage +to the programmer? Then we can have a value of type `Expr[T]` at run-time that we +can essentially treat as a typed-syntax tree that we can either _show_ as a +string (pretty-print) or compile and run. If the number of quotes exceeds the +number of splices by more than one (effectively handling at run-time values of type +`Expr[Expr[T]]`, `Expr[Expr[Expr[T]]]`, ...) then we talk about Multi-Stage +Programming. + +The motivation behind this _paradigm_ is to let runtime information affect or +guide code-generation. + +Intuition: The phase in which code is run is determined by the difference +between the number of splice scopes and quote scopes in which it is embedded. + + - If there are more splices than quotes, the code is run at compile-time i.e. + as a macro. In the general case, this means running an interpreter that + evaluates the code, which is represented as a typed abstract syntax tree. The + interpreter can fall back to reflective calls when evaluating an application + of a previously compiled method. 
If the splice excess is more than one, it + would mean that a macro’s implementation code (as opposed to the code it + expands to) invokes other macros. If macros are realized by interpretation, + this would lead to towers of interpreters, where the first interpreter would + itself interpret an interpreter code that possibly interprets another + interpreter and so on. + + - If the number of splices equals the number of quotes, the code is compiled + and run as usual. + + - If the number of quotes exceeds the number of splices, the code is staged. + That is, it produces a typed abstract syntax tree or type structure at + run-time. A quote excess of more than one corresponds to multi-staged + programming. + +Providing an interpreter for the full language is quite difficult, and it is +even more difficult to make that interpreter run efficiently. So we currently +impose the following restrictions on the use of splices. + + 1. A top-level splice must appear in an inline method (turning that method + into a macro) + + 2. The splice must call a previously compiled + method passing quoted arguments, constant arguments or inline arguments. + + 3. Splices inside splices (but no intervening quotes) are not allowed. + + +## API + +The framework as discussed so far allows code to be staged, i.e. be prepared +to be executed at a later stage. To run that code, there is another method +in class `Expr` called `run`. Note that `$` and `run` both map from `Expr[T]` +to `T` but only `$` is subject to the [Cross-Stage Safety](./macros.md#cross-stage-safety), whereas `run` is just a normal method. +`scala.quoted.staging.run` provides a `Quotes` that can be used to show the expression in its scope. +On the other hand `scala.quoted.staging.withQuotes` provides a `Quotes` without evaluating the expression. + +```scala +package scala.quoted.staging + +def run[T](expr: Quotes ?=> Expr[T])(using Compiler): T = ... + +def withQuotes[T](thunk: Quotes ?=> T)(using Compiler): T = ... 
+``` + +## Create a new Scala 3 project with staging enabled + +```shell +sbt new scala/scala3-staging.g8 +``` + +From [`scala/scala3-staging.g8`](https://github.com/scala/scala3-staging.g8). + +It will create a project with the necessary dependencies and some examples. + +In case you prefer to create the project on your own, make sure to define the following dependency in your [`build.sbt` build definition](https://www.scala-sbt.org/1.x/docs/Basic-Def.html) + +```scala +libraryDependencies += "org.scala-lang" %% "scala3-staging" % scalaVersion.value +``` + +and in case you use `scalac`/`scala` directly, then use the `-with-compiler` flag for both: + +```shell +scalac -with-compiler -d out Test.scala +scala -with-compiler -classpath out Test +``` + +## Example + +Now take exactly the same example as in [Macros](./macros.md). Assume that we +do not want to pass an array statically but generate code at run-time and pass +the value, also at run-time. Note, how we make a future-stage function of type +`Expr[Array[Int] => Int]` in line 6 below. Using `staging.run { ... }` we can evaluate an +expression at runtime. Within the scope of `staging.run` we can also invoke `show` on an expression +to get a source-like representation of the expression. + +```scala +import scala.quoted.* + +// make available the necessary compiler for runtime code generation +given staging.Compiler = staging.Compiler.make(getClass.getClassLoader) + +val f: Array[Int] => Int = staging.run { + val stagedSum: Expr[Array[Int] => Int] = + '{ (arr: Array[Int]) => ${sum('arr)}} + println(stagedSum.show) // Prints "(arr: Array[Int]) => { var sum = 0; ... 
}" + stagedSum +} + +f.apply(Array(1, 2, 3)) // Returns 6 +``` diff --git a/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md b/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md new file mode 100644 index 000000000000..e643775243e0 --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md @@ -0,0 +1,57 @@ +--- +layout: doc-page +title: "TASTy Inspection" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/tasty-inspect.html +--- + +```scala +libraryDependencies += "org.scala-lang" %% "scala3-tasty-inspector" % scalaVersion.value +``` + +TASTy files contain the full typed tree of a class including source positions +and documentation. This is ideal for tools that analyze or extract semantic +information from the code. To avoid the hassle of working directly with the TASTy +file we provide the `Inspector` which loads the contents and exposes it +through the TASTy reflect API. + +## Inspecting TASTy files + +To inspect the trees of a TASTy file a consumer can be defined in the following way. + +```scala +import scala.quoted.* +import scala.tasty.inspector.* + +class MyInspector extends Inspector: + def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit = + import quotes.reflect.* + for tasty <- tastys do + val tree = tasty.ast + // Do something with the tree +``` + +Then the consumer can be instantiated with the following code to get the tree of the `foo/Bar.tasty` file. 
+ +```scala +object Test: + def main(args: Array[String]): Unit = + val tastyFiles = List("foo/Bar.tasty") + TastyInspector.inspectTastyFiles(tastyFiles)(new MyInspector) +``` + +Note that if we need to run the main (in the example below defined in an object called `Test`) after compilation we need to make the compiler available to the runtime: + +```shell +scalac -d out Test.scala +scala -with-compiler -classpath out Test +``` + +## Template project + +Using sbt version `1.1.5+`, do: + +```shell +sbt new scala/scala3-tasty-inspector.g8 +``` + +in the folder where you want to clone the template. diff --git a/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md b/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md new file mode 100644 index 000000000000..f3237ddf7b9a --- /dev/null +++ b/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md @@ -0,0 +1,125 @@ +--- +layout: doc-page +title: "Dependent Function Types - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/dependent-function-types-spec.html +--- + +Initial implementation in [PR #3464](https://github.com/lampepfl/dotty/pull/3464). + +## Syntax + +``` +FunArgTypes ::= InfixType + | ‘(’ [ FunArgType {',' FunArgType } ] ‘)’ + | ‘(’ TypedFunParam {',' TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +``` + +Dependent function types associate to the right, e.g. +`(s: S) => (t: T) => U` is the same as `(s: S) => ((t: T) => U)`. + +## Implementation + +Dependent function types are shorthands for class types that define `apply` +methods with a dependent result type. Dependent function types desugar to +refinement types of `scala.FunctionN`. 
A dependent function type +`(x1: K1, ..., xN: KN) => R` of arity `N` translates to: + +```scala +FunctionN[K1, ..., Kn, R']: + def apply(x1: K1, ..., xN: KN): R +``` + +where the result type parameter `R'` is the least upper approximation of the +precise result type `R` without any reference to value parameters `x1, ..., xN`. + +The syntax and semantics of anonymous dependent functions is identical to the +one of regular functions. Eta expansion is naturally generalized to produce +dependent function types for methods with dependent result types. + +Dependent functions can be implicit, and generalize to arity `N > 22` in the +same way that other functions do, see +[the corresponding documentation](../dropped-features/limit22.md). + +## Examples + +The example below defines a trait `C` and the two dependent function types +`DF` and `IDF` and prints the results of the respective function applications: + +[depfuntype.scala]: https://github.com/lampepfl/dotty/blob/main/tests/pos/depfuntype.scala + +```scala +trait C { type M; val m: M } + +type DF = (x: C) => x.M + +type IDF = (x: C) ?=> x.M + +@main def test = + val c = new C { type M = Int; val m = 3 } + + val depfun: DF = (x: C) => x.m + val t = depfun(c) + println(s"t=$t") // prints "t=3" + + val idepfun: IDF = summon[C].m + val u = idepfun(using c) + println(s"u=$u") // prints "u=3" + +``` + +In the following example the dependent type `f.Eff` refers to the effect type `CanThrow`: + +[eff-dependent.scala]: https://github.com/lampepfl/dotty/blob/main/tests/run/eff-dependent.scala + +```scala +trait Effect + +// Type X => Y +abstract class Fun[-X, +Y]: + type Eff <: Effect + def apply(x: X): Eff ?=> Y + +class CanThrow extends Effect +class CanIO extends Effect + +given ct: CanThrow = new CanThrow +given ci: CanIO = new CanIO + +class I2S extends Fun[Int, String]: + type Eff = CanThrow + def apply(x: Int) = x.toString + +class S2I extends Fun[String, Int]: + type Eff = CanIO + def apply(x: String) =
x.length + +// def map(f: A => B)(xs: List[A]): List[B] +def map[A, B](f: Fun[A, B])(xs: List[A]): f.Eff ?=> List[B] = + xs.map(f.apply) + +// def mapFn[A, B]: (A => B) -> List[A] -> List[B] +def mapFn[A, B]: (f: Fun[A, B]) => List[A] => f.Eff ?=> List[B] = + f => xs => map(f)(xs) + +// def compose(f: A => B)(g: B => C)(x: A): C +def compose[A, B, C](f: Fun[A, B])(g: Fun[B, C])(x: A): + f.Eff ?=> g.Eff ?=> C = + g(f(x)) + +// def composeFn: (A => B) -> (B => C) -> A -> C +def composeFn[A, B, C]: + (f: Fun[A, B]) => (g: Fun[B, C]) => A => f.Eff ?=> g.Eff ?=> C = + f => g => x => compose(f)(g)(x) + +@main def test = + val i2s = new I2S + val s2i = new S2I + + assert(mapFn(i2s)(List(1, 2, 3)).mkString == "123") + assert(composeFn(i2s)(s2i)(22) == 2) +``` + +## Type Checking + +After desugaring no additional typing rules are required for dependent function types. diff --git a/docs/_spec/TODOreference/new-types/dependent-function-types.md b/docs/_spec/TODOreference/new-types/dependent-function-types.md new file mode 100644 index 000000000000..adbee1d8b3c8 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/dependent-function-types.md @@ -0,0 +1,49 @@ +--- +layout: doc-page +title: "Dependent Function Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/dependent-function-types.html +--- + +A dependent function type is a function type whose result depends +on the function's parameters. For example: + +```scala +trait Entry { type Key; val key: Key } + +def extractKey(e: Entry): e.Key = e.key // a dependent method + +val extractor: (e: Entry) => e.Key = extractKey // a dependent function value +// ^^^^^^^^^^^^^^^^^^^ +// a dependent function type +``` + +Scala already has _dependent methods_, i.e. methods where the result +type refers to some of the parameters of the method. Method +`extractKey` is an example. Its result type, `e.Key` refers to its +parameter `e` (we also say, `e.Key` _depends_ on `e`). 
But so far it +was not possible to turn such methods into function values, so that +they can be passed as parameters to other functions, or returned as +results. Dependent methods could not be turned into functions simply +because there was no type that could describe them. + +In Scala 3 this is now possible. The type of the `extractor` value above is + +```scala +(e: Entry) => e.Key +``` + +This type describes function values that take any argument `e` of type +`Entry` and return a result of type `e.Key`. + +Recall that a normal function type `A => B` is represented as an +instance of the [`Function1` trait](https://scala-lang.org/api/3.x/scala/Function1.html) +(i.e. `Function1[A, B]`) and analogously for functions with more parameters. Dependent functions +are also represented as instances of these traits, but they get an additional +refinement. In fact, the dependent function type above is just syntactic sugar for + +```scala +Function1[Entry, Entry#Key]: + def apply(e: Entry): e.Key +``` + +[More details](./dependent-function-types-spec.md) diff --git a/docs/_spec/TODOreference/new-types/intersection-types-spec.md b/docs/_spec/TODOreference/new-types/intersection-types-spec.md new file mode 100644 index 000000000000..346c57c004f0 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/intersection-types-spec.md @@ -0,0 +1,108 @@ +--- +layout: doc-page +title: "Intersection Types - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/intersection-types-spec.html +--- + +## Syntax + +Syntactically, the type `S & T` is an infix type, where the infix operator is `&`. +The operator `&` is a normal identifier +with the usual precedence and subject to usual resolving rules. +Unless shadowed by another definition, it resolves to the type `scala.&`, +which acts as a type alias to an internal representation of intersection types. 
+ +``` +Type ::= ...| InfixType +InfixType ::= RefinedType {id [nl] RefinedType} +``` + +## Subtyping Rules + +``` +T <: A T <: B +---------------- + T <: A & B + + A <: T +---------------- + A & B <: T + + B <: T +---------------- + A & B <: T +``` + +From the rules above, we can show that `&` is _commutative_: `A & B <: B & A` for any type `A` and `B`. + +``` + B <: B A <: A +---------- ----------- +A & B <: B A & B <: A +--------------------------- + A & B <: B & A +``` + +In other words, `A & B` is the same type as `B & A`, in the sense that the two types +have the same values and are subtypes of each other. + +If `C` is a type constructor, then `C[A] & C[B]` can be simplified using the following three rules: + +- If `C` is covariant, `C[A] & C[B] ~> C[A & B]` +- If `C` is contravariant, `C[A] & C[B] ~> C[A | B]` +- If `C` is non-variant, emit a compile error + +When `C` is covariant, `C[A & B] <: C[A] & C[B]` can be derived: + +``` + A <: A B <: B + ---------- --------- + A & B <: A A & B <: B +--------------- ----------------- +C[A & B] <: C[A] C[A & B] <: C[B] +------------------------------------------ + C[A & B] <: C[A] & C[B] +``` + +When `C` is contravariant, `C[A | B] <: C[A] & C[B]` can be derived: + +``` + A <: A B <: B + ---------- --------- + A <: A | B B <: A | B +------------------- ---------------- +C[A | B] <: C[A] C[A | B] <: C[B] +-------------------------------------------------- + C[A | B] <: C[A] & C[B] +``` + +## Erasure + +The erased type for `S & T` is the erased _glb_ (greatest lower bound) of the +erased type of `S` and `T`.
The rules for erasure of intersection types are given +below in pseudocode: + +``` +|S & T| = glb(|S|, |T|) + +glb(JArray(A), JArray(B)) = JArray(glb(A, B)) +glb(JArray(T), _) = JArray(T) +glb(_, JArray(T)) = JArray(T) +glb(A, B) = A if A extends B +glb(A, B) = B if B extends A +glb(A, _) = A if A is not a trait +glb(_, B) = B if B is not a trait +glb(A, _) = A // use first +``` + +In the above, `|T|` means the erased type of `T`, `JArray` refers to +the type of Java Array. + +See also: [`TypeErasure#erasedGlb`](https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/core/TypeErasure.scala#L289). + +## Relationship with Compound Type (`with`) + +Intersection types `A & B` replace compound types `A with B` in Scala 2. For the +moment, the syntax `A with B` is still allowed and interpreted as `A & B`, but +its usage as a type (as opposed to in a `new` or `extends` clause) will be +deprecated and removed in the future. diff --git a/docs/_spec/TODOreference/new-types/intersection-types.md b/docs/_spec/TODOreference/new-types/intersection-types.md new file mode 100644 index 000000000000..a4eedeb000f6 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/intersection-types.md @@ -0,0 +1,68 @@ +--- +layout: doc-page +title: "Intersection Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html +--- + +Used on types, the `&` operator creates an intersection type. + +## Type Checking + +The type `S & T` represents values that are of the type `S` and `T` at the same time. + +```scala +trait Resettable: + def reset(): Unit + +trait Growable[T]: + def add(t: T): Unit + +def f(x: Resettable & Growable[String]) = + x.reset() + x.add("first") +``` + +The parameter `x` is required to be _both_ a `Resettable` and a +`Growable[String]`. + +The members of an intersection type `A & B` are all the members of `A` and all +the members of `B`. 
For instance `Resettable & Growable[String]` +has member methods `reset` and `add`. + +`&` is _commutative_: `A & B` is the same type as `B & A`. + +If a member appears in both `A` and `B`, its type in `A & B` is the intersection +of its type in `A` and its type in `B`. For instance, assume the definitions: + +```scala +trait A: + def children: List[A] + +trait B: + def children: List[B] + +val x: A & B = new C +val ys: List[A & B] = x.children +``` + +The type of `children` in `A & B` is the intersection of `children`'s +type in `A` and its type in `B`, which is `List[A] & List[B]`. This +can be further simplified to `List[A & B]` because `List` is +covariant. + +One might wonder how the compiler could come up with a definition for +`children` of type `List[A & B]` since what is given are `children` +definitions of type `List[A]` and `List[B]`. The answer is the compiler does not +need to. `A & B` is just a type that represents a set of requirements for +values of the type. At the point where a value is _constructed_, one +must make sure that all inherited members are correctly defined. +So if one defines a class `C` that inherits `A` and `B`, one needs +to give at that point a definition of a `children` method with the required type. + +```scala +class C extends A, B: + def children: List[A & B] = ??? +``` + + +[More details](./intersection-types-spec.md) diff --git a/docs/_spec/TODOreference/new-types/match-types.md b/docs/_spec/TODOreference/new-types/match-types.md new file mode 100644 index 000000000000..d646dd11880b --- /dev/null +++ b/docs/_spec/TODOreference/new-types/match-types.md @@ -0,0 +1,247 @@ +--- +layout: doc-page +title: "Match Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/match-types.html +--- + +A match type reduces to one of its right-hand sides, depending on the type of +its scrutinee. 
For example: + +```scala +type Elem[X] = X match + case String => Char + case Array[t] => t + case Iterable[t] => t +``` + +This defines a type that reduces as follows: + +```scala +Elem[String] =:= Char +Elem[Array[Int]] =:= Int +Elem[List[Float]] =:= Float +Elem[Nil.type] =:= Nothing +``` + +Here `=:=` is understood to mean that left and right-hand sides are mutually +subtypes of each other. + +In general, a match type is of the form + +```scala +S match { P1 => T1 ... Pn => Tn } +``` + +where `S`, `T1`, ..., `Tn` are types and `P1`, ..., `Pn` are type patterns. Type +variables in patterns start with a lower case letter, as usual. + +Match types can form part of recursive type definitions. Example: + +```scala +type LeafElem[X] = X match + case String => Char + case Array[t] => LeafElem[t] + case Iterable[t] => LeafElem[t] + case AnyVal => X +``` + +Recursive match type definitions can also be given an upper bound, like this: + +```scala +type Concat[Xs <: Tuple, +Ys <: Tuple] <: Tuple = Xs match + case EmptyTuple => Ys + case x *: xs => x *: Concat[xs, Ys] +``` + +In this definition, every instance of `Concat[A, B]`, whether reducible or not, +is known to be a subtype of `Tuple`. This is necessary to make the recursive +invocation `x *: Concat[xs, Ys]` type check, since `*:` demands a `Tuple` as its +right operand. + +## Dependent Typing + +Match types can be used to define dependently typed methods. For instance, here +is the value level counterpart to the `LeafElem` type defined above (note the +use of the match type as the return type): + +```scala +def leafElem[X](x: X): LeafElem[X] = x match + case x: String => x.charAt(0) + case x: Array[t] => leafElem(x(0)) + case x: Iterable[t] => leafElem(x.head) + case x: AnyVal => x +``` + +This special mode of typing for match expressions is only used when the +following conditions are met: + +1. The match expression patterns do not have guards +2. 
The match expression scrutinee's type is a subtype of the match type + scrutinee's type +3. The match expression and the match type have the same number of cases +4. The match expression patterns are all [Typed Patterns](https://scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#typed-patterns), + and these types are `=:=` to their corresponding type patterns in the match + type + +So you know, while the case body will be expected to have the type on the right-hand +side of the corresponding match type case, that doesn't imply the match type argument +is constrained. Using the example, the last case body must conform to X, but that +doesn't constrain X to be AnyVal, and therefore a LeafElem[X] inside the body wouldn't +reduce; it would remain stuck, and as such just an abstract type. + +## Representation of Match Types + +The internal representation of a match type +``` +S match { P1 => T1 ... Pn => Tn } +``` +is `Match(S, C1, ..., Cn) <: B` where each case `Ci` is of the form +``` +[Xs] =>> P => T +``` + +Here, `[Xs]` is a type parameter clause of the variables bound in pattern `Pi`. +If there are no bound type variables in a case, the type parameter clause is +omitted and only the function type `P => T` is kept. So each case is either a +unary function type or a type lambda over a unary function type. + +`B` is the declared upper bound of the match type, or `Any` if no such bound is +given. We will leave it out in places where it does not matter for the +discussion. The scrutinee, bound, and pattern types must all be first-order +types. + +## Match Type Reduction + +Match type reduction follows the semantics of match expressions, that is, a +match type of the form `S match { P1 => T1 ... Pn => Tn }` reduces to `Ti` if +and only if `s: S match { _: P1 => T1 ... _: Pn => Tn }` evaluates to a value of +type `Ti` for all `s: S`. 
+ +The compiler implements the following reduction algorithm: + +- If the scrutinee type `S` is an empty set of values (such as `Nothing` or + `String & Int`), do not reduce. +- Sequentially consider each pattern `Pi` + - If `S <: Pi` reduce to `Ti`. + - Otherwise, try constructing a proof that `S` and `Pi` are disjoint, or, in + other words, that no value `s` of type `S` is also of type `Pi`. + - If such proof is found, proceed to the next case (`Pi+1`), otherwise, do + not reduce. + +Disjointness proofs rely on the following properties of Scala types: + +1. Single inheritance of classes +2. Final classes cannot be extended +3. Constant types with distinct values are nonintersecting +4. Singleton paths to distinct values are nonintersecting, such as `object` definitions or singleton enum cases. + +Type parameters in patterns are minimally instantiated when computing `S <: Pi`. +An instantiation `Is` is _minimal_ for `Xs` if all type variables in `Xs` that +appear covariantly and nonvariantly in `Is` are as small as possible and all +type variables in `Xs` that appear contravariantly in `Is` are as large as +possible. Here, "small" and "large" are understood with respect to `<:`. + +For simplicity, we have omitted constraint handling so far. The full formulation +of subtyping tests describes them as a function from a constraint and a pair of +types to either _success_ and a new constraint or _failure_. In the context of +reduction, the subtyping test `S <: [Xs := Is] P` is understood to leave the +bounds of all variables in the input constraint unchanged, i.e. existing +variables in the constraint cannot be instantiated by matching the scrutinee +against the patterns. + +## Subtyping Rules for Match Types + +The following rules apply to match types. For simplicity, we omit environments +and constraints. + +1. The first rule is a structural comparison between two match types: + + ``` + S match { P1 => T1 ... Pm => Tm } <: T match { Q1 => U1 ... 
Qn => Un } + ``` + + if + + ``` + S =:= T, m >= n, Pi =:= Qi and Ti <: Ui for i in 1..n + ``` + + I.e. scrutinees and patterns must be equal and the corresponding bodies must + be subtypes. No case re-ordering is allowed, but the subtype can have more + cases than the supertype. + +2. The second rule states that a match type and its redux are mutual subtypes. + + ``` + S match { P1 => T1 ... Pn => Tn } <: U + U <: S match { P1 => T1 ... Pn => Tn } + ``` + + if + + `S match { P1 => T1 ... Pn => Tn }` reduces to `U` + +3. The third rule states that a match type conforms to its upper bound: + + ``` + (S match { P1 => T1 ... Pn => Tn } <: B) <: B + ``` + +## Termination + +Match type definitions can be recursive, which means that it's possible to run +into an infinite loop while reducing match types. + +Since reduction is linked to subtyping, we already have a cycle detection +mechanism in place. As a result, the following will already give a reasonable +error message: + +```scala +type L[X] = X match + case Int => L[X] + +def g[X]: L[X] = ??? +``` + +```scala + | val x: Int = g[Int] + | ^ + |Recursion limit exceeded. + |Maybe there is an illegal cyclic reference? + |If that's not the case, you could also try to + |increase the stacksize using the -Xss JVM option. + |A recurring operation is (inner to outer): + | + | subtype LazyRef(Test.L[Int]) <:< Int +``` + +Internally, the Scala compiler detects these cycles by turning selected stack overflows into +type errors. If there is a stack overflow during subtyping, the exception will +be caught and turned into a compile-time error that indicates a trace of the +subtype tests that caused the overflow without showing a full stack trace. + + +## Match Types Variance + +All type positions in a match type (scrutinee, patterns, bodies) are considered invariant. + +## Related Work + +Match types have similarities with +[closed type families](https://wiki.haskell.org/GHC/Type_families) in Haskell. 
+Some differences are: + +- Subtyping instead of type equalities. +- Match type reduction does not tighten the underlying constraint, whereas type + family reduction does unify. This difference in approach mirrors the + difference between local type inference in Scala and global type inference in + Haskell. + +Match types are also similar to Typescript's +[conditional types](https://github.com/Microsoft/TypeScript/pull/21316). The +main differences here are: + + - Conditional types only reduce if both the scrutinee and pattern are ground, + whereas match types also work for type parameters and abstract types. + - Match types support direct recursion. + - Conditional types distribute through union types. diff --git a/docs/_spec/TODOreference/new-types/new-types.md b/docs/_spec/TODOreference/new-types/new-types.md new file mode 100644 index 000000000000..84c157495d6f --- /dev/null +++ b/docs/_spec/TODOreference/new-types/new-types.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "New Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/index.html +--- + +This chapter documents the new types introduced in Scala 3. diff --git a/docs/_spec/TODOreference/new-types/polymorphic-function-types.md b/docs/_spec/TODOreference/new-types/polymorphic-function-types.md new file mode 100644 index 000000000000..1754bf844831 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/polymorphic-function-types.md @@ -0,0 +1,94 @@ +--- +layout: doc-page +title: "Polymorphic Function Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/polymorphic-function-types.html +--- + +A polymorphic function type is a function type which accepts type parameters. 
+For example: + +```scala +// A polymorphic method: +def foo[A](xs: List[A]): List[A] = xs.reverse + +// A polymorphic function value: +val bar: [A] => List[A] => List[A] +// ^^^^^^^^^^^^^^^^^^^^^^^^^ +// a polymorphic function type + = [A] => (xs: List[A]) => foo[A](xs) +``` + +Scala already has _polymorphic methods_, i.e. methods which accept type parameters. +Method `foo` above is an example, accepting a type parameter `A`. +So far, it +was not possible to turn such methods into polymorphic function values like `bar` above, +which can be passed as parameters to other functions, or returned as results. + +In Scala 3 this is now possible. The type of the `bar` value above is + +```scala +[A] => List[A] => List[A] +``` + +This type describes function values which take a type `A` as a parameter, +then take a list of type `List[A]`, and return a list of the same type `List[A]`. + +[More details](https://github.com/lampepfl/dotty/pull/4672) + + +## Example Usage + +Polymorphic function types are particularly useful +when callers of a method are required to provide a +function which has to be polymorphic, +meaning that it should accept arbitrary types as part of its inputs. + +For instance, consider the situation where we have +a data type to represent the expressions of a simple language +(consisting only of variables and function applications) +in a strongly-typed way: + +```scala +enum Expr[A]: + case Var(name: String) + case Apply[A, B](fun: Expr[B => A], arg: Expr[B]) extends Expr[A] +``` + +We would like to provide a way for users to map a function +over all immediate subexpressions of a given `Expr`. +This requires the given function to be polymorphic, +since each subexpression may have a different type.
+Here is how to implement this using polymorphic function types: + +```scala +def mapSubexpressions[A](e: Expr[A])(f: [B] => Expr[B] => Expr[B]): Expr[A] = + e match + case Apply(fun, arg) => Apply(f(fun), f(arg)) + case Var(n) => Var(n) +``` + +And here is how to use this function to _wrap_ each subexpression +in a given expression with a call to some `wrap` function, +defined as a variable: + +```scala +val e0 = Apply(Var("f"), Var("a")) +val e1 = mapSubexpressions(e0)( + [B] => (se: Expr[B]) => Apply(Var[B => B]("wrap"), se)) +println(e1) // Apply(Apply(Var(wrap),Var(f)),Apply(Var(wrap),Var(a))) +``` + +## Relationship With Type Lambdas + +Polymorphic function types are not to be confused with +[_type lambdas_](type-lambdas.md). +While the former describes the _type_ of a polymorphic _value_, +the latter is an actual function value _at the type level_. + +A good way of understanding the difference is to notice that +**_type lambdas are applied in types, +whereas polymorphic functions are applied in terms_**: +One would call the function `bar` above +by passing it a type argument `bar[Int]` _within a method body_. +On the other hand, given a type lambda such as `type F = [A] =>> List[A]`, +one would call `F` _within a type expression_, as in `type Bar = F[Int]`. diff --git a/docs/_spec/TODOreference/new-types/type-lambdas-spec.md b/docs/_spec/TODOreference/new-types/type-lambdas-spec.md new file mode 100644 index 000000000000..52f88dab4217 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/type-lambdas-spec.md @@ -0,0 +1,116 @@ +--- +layout: doc-page +title: "Type Lambdas - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas-spec.html +--- + +## Syntax + +``` +Type ::= ... 
| TypeParamClause ‘=>>’ Type +TypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ +TypeParam ::= {Annotation} (id [HkTypeParamClause] | ‘_’) TypeBounds +TypeBounds ::= [‘>:’ Type] [‘<:’ Type] +``` + +## Type Checking + +A type lambda such as `[X] =>> F[X]` defines a function from types to types. The parameter(s) may carry bounds. +If a parameter is bounded, as in `[X >: L <: U] =>> F[X]` it is checked that arguments to the parameters conform to the bounds `L` and `U`. +Only the upper bound `U` can be F-bounded, i.e. `X` can appear in it. + +## Subtyping Rules + +Assume two type lambdas +```scala +type TL1 = [X >: L1 <: U1] =>> R1 +type TL2 = [X >: L2 <: U2] =>> R2 +``` +Then `TL1 <: TL2`, if + + - the type interval `L2..U2` is contained in the type interval `L1..U1` (i.e. +`L1 <: L2` and `U2 <: U1`), + - `R1 <: R2` + +Here we have relied on [alpha renaming](https://en.wikipedia.org/wiki/Lambda_calculus#%CE%B1-conversion) to match the two bound types `X`. + +A partially applied type constructor such as `List` is assumed to be equivalent to +its eta expansion. I.e, `List = [X] =>> List[X]`. This allows type constructors to be compared with type lambdas. + +## Relationship with Parameterized Type Definitions + +A parameterized type definition +```scala +type T[X] = R +``` +is regarded as a shorthand for an unparameterized definition with a type lambda as right-hand side: +```scala +type T = [X] =>> R +``` +If the type definition carries `+` or `-` variance annotations, +it is checked that the variance annotations are satisfied by the type lambda. +For instance, +```scala +type F2[A, +B] = A => B +``` +expands to +```scala +type F2 = [A, B] =>> A => B +``` +and at the same time it is checked that the parameter `B` appears covariantly in `A => B`. + +A parameterized abstract type +```scala +type T[X] >: L <: U +``` +is regarded as shorthand for an unparameterized abstract type with type lambdas as bounds. 
+```scala +type T >: ([X] =>> L) <: ([X] =>> U) +``` +However, if `L` is `Nothing` it is not parameterized, since `Nothing` is treated as a bottom type for all kinds. For instance, +```scala +type T[X] <: X => X +``` +is expanded to +```scala +type T >: Nothing <: ([X] =>> X => X) +``` +instead of +```scala +type T >: ([X] =>> Nothing) <: ([X] =>> X => X) +``` + +The same expansions apply to type parameters. For instance, +```scala +[F[X] <: Coll[X]] +``` +is treated as a shorthand for +```scala +[F >: Nothing <: [X] =>> Coll[X]] +``` +Abstract types and opaque type aliases remember the variances they were created with. So the type +```scala +type F2[-A, +B] +``` +is known to be contravariant in `A` and covariant in `B` and can be instantiated only +with types that satisfy these constraints. Likewise +```scala +opaque type O[X] = List[X] +``` +`O` is known to be invariant (and not covariant, as its right-hand side would suggest). On the other hand, a transparent alias +```scala +type O2[X] = List[X] +``` +would be treated as covariant, `X` is used covariantly on its right-hand side. + +**Note**: The decision to treat `Nothing` as universal bottom type is provisional, and might be changed after further discussion. + +**Note**: Scala 2 and 3 differ in that Scala 2 also treats `Any` as universal top-type. This is not done in Scala 3. See also the discussion on [kind polymorphism](../other-new-features/kind-polymorphism.md) + +## Curried Type Parameters + +The body of a type lambda can again be a type lambda. Example: +```scala +type TL = [X] =>> [Y] =>> (X, Y) +``` +Currently, no special provision is made to infer type arguments to such curried type lambdas. This is left for future work. 
diff --git a/docs/_spec/TODOreference/new-types/type-lambdas.md b/docs/_spec/TODOreference/new-types/type-lambdas.md new file mode 100644 index 000000000000..ba88e28f5d56 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/type-lambdas.md @@ -0,0 +1,17 @@ +--- +layout: doc-page +title: "Type Lambdas" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas.html +--- + +A _type lambda_ lets one express a higher-kinded type directly, without +a type definition. + +```scala +[X, Y] =>> Map[Y, X] +``` + +For instance, the type above defines a binary type constructor, which maps arguments `X` and `Y` to `Map[Y, X]`. +Type parameters of type lambdas can have bounds, but they cannot carry `+` or `-` variance annotations. + +[More details](./type-lambdas-spec.md) diff --git a/docs/_spec/TODOreference/new-types/union-types-spec.md b/docs/_spec/TODOreference/new-types/union-types-spec.md new file mode 100644 index 000000000000..d250d3f11713 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/union-types-spec.md @@ -0,0 +1,172 @@ +--- +layout: doc-page +title: "Union Types - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/union-types-spec.html +--- + +## Syntax + +Syntactically, unions follow the same rules as intersections, but have a lower precedence, see +[Intersection Types - More Details](./intersection-types-spec.md). + +### Interaction with pattern matching syntax +`|` is also used in pattern matching to separate pattern alternatives and has +lower precedence than `:` as used in typed patterns, this means that: + +```scala +case _: A | B => ... +``` + +is still equivalent to: + +```scala +case (_: A) | B => ... +``` + +and not to: + +```scala +case _: (A | B) => ... +``` + +## Subtyping Rules + +- `A` is always a subtype of `A | B` for all `A`, `B`. 
+- If `A <: C` and `B <: C` then `A | B <: C` +- Like `&`, `|` is commutative and associative: + + ```scala + A | B =:= B | A + A | (B | C) =:= (A | B) | C + ``` + +- `&` is distributive over `|`: + + ```scala + A & (B | C) =:= A & B | A & C + ``` + +From these rules it follows that the _least upper bound_ (LUB) of a set of types +is the union of these types. This replaces the +[definition of least upper bound in the Scala 2 specification](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#least-upper-bounds-and-greatest-lower-bounds). + +## Motivation + +The primary reason for introducing union types in Scala is that they allow us to +guarantee that for every set of types, we can always form a finite LUB. This is +both useful in practice (infinite LUBs in Scala 2 were approximated in an ad-hoc +way, resulting in imprecise and sometimes incredibly long types) and in theory +(the type system of Scala 3 is based on the +[DOT calculus](https://infoscience.epfl.ch/record/227176/files/soundness_oopsla16.pdf), +which has union types). + +Additionally, union types are a useful construct when trying to give types to existing +dynamically typed APIs, this is why they're [an integral part of TypeScript](https://www.typescriptlang.org/docs/handbook/advanced-types.html#union-types) +and have even been [partially implemented in Scala.js](https://github.com/scala-js/scala-js/blob/master/library/src/main/scala/scala/scalajs/js/Union.scala). + +## Join of a union type + +In some situation described below, a union type might need to be widened to +a non-union type, for this purpose we define the _join_ of a union type `T1 | +... | Tn` as the smallest intersection type of base class instances of +`T1`,...,`Tn`. Note that union types might still appear as type arguments in the +resulting type, this guarantees that the join is always finite. 
+ +### Example + +Given + +```scala +trait C[+T] +trait D +trait E +class A extends C[A] with D +class B extends C[B] with D with E +``` + +The join of `A | B` is `C[A | B] & D` + +## Type inference + +When inferring the result type of a definition (`val`, `var`, or `def`) and the +type we are about to infer is a union type, then we replace it by its join. +Similarly, when instantiating a type argument, if the corresponding type +parameter is not upper-bounded by a union type and the type we are about to +instantiate is a union type, we replace it by its join. This mirrors the +treatment of singleton types which are also widened to their underlying type +unless explicitly specified. The motivation is the same: inferring types +which are "too precise" can lead to unintuitive typechecking issues later on. + +**Note:** Since this behavior limits the usability of union types, it might +be changed in the future. For example by not widening unions that have been +explicitly written down by the user and not inferred, or by not widening a type +argument when the corresponding type parameter is covariant. + +See [PR #2330](https://github.com/lampepfl/dotty/pull/2330) and +[Issue #4867](https://github.com/lampepfl/dotty/issues/4867) for further discussions. + +### Example + +```scala +import scala.collection.mutable.ListBuffer +val x = ListBuffer(Right("foo"), Left(0)) +val y: ListBuffer[Either[Int, String]] = x +``` + +This code typechecks because the inferred type argument to `ListBuffer` in the +right-hand side of `x` was `Left[Int, Nothing] | Right[Nothing, String]` which +was widened to `Either[Int, String]`. If the compiler hadn't done this widening, +the last line wouldn't typecheck because `ListBuffer` is invariant in its +argument. + + +## Members + +The members of a union type are the members of its join. + +### Example + +The following code does not typecheck, because method `hello` is not a member of +`AnyRef` which is the join of `A | B`. 
+ +```scala +trait A { def hello: String } +trait B { def hello: String } + +def test(x: A | B) = x.hello // error: value `hello` is not a member of A | B +``` + +On the other hand, the following would be allowed + +```scala +trait C { def hello: String } +trait A extends C with D +trait B extends C with E + +def test(x: A | B) = x.hello // ok as `hello` is a member of the join of A | B which is C +``` + +## Exhaustivity checking + +If the selector of a pattern match is a union type, the match is considered +exhaustive if all parts of the union are covered. + +## Erasure + +The erased type for `A | B` is the _erased least upper bound_ of the erased +types of `A` and `B`. Quoting from the documentation of `TypeErasure#erasedLub`, +the erased LUB is computed as follows: + +- if both argument are arrays of objects, an array of the erased LUB of the element types +- if both arguments are arrays of same primitives, an array of this primitive +- if one argument is array of primitives and the other is array of objects, + [`Object`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Object.html) +- if one argument is an array, [`Object`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Object.html) +- otherwise a common superclass or trait S of the argument classes, with the + following two properties: + * S is minimal: no other common superclass or trait derives from S + * S is last : in the linearization of the first argument type `|A|` + there are no minimal common superclasses or traits that + come after S. + The reason to pick last is that we prefer classes over traits that way, + which leads to more predictable bytecode and (?) faster dynamic dispatch. 
diff --git a/docs/_spec/TODOreference/new-types/union-types.md b/docs/_spec/TODOreference/new-types/union-types.md new file mode 100644 index 000000000000..ebc4565e36fb --- /dev/null +++ b/docs/_spec/TODOreference/new-types/union-types.md @@ -0,0 +1,46 @@ +--- +layout: doc-page +title: "Union Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/union-types.html +--- + +A union type `A | B` has as values all values of type `A` and also all values of type `B`. + + +```scala +case class UserName(name: String) +case class Password(hash: Hash) + +def help(id: UserName | Password) = + val user = id match + case UserName(name) => lookupName(name) + case Password(hash) => lookupPassword(hash) + ... +``` + +Union types are duals of intersection types. `|` is _commutative_: +`A | B` is the same type as `B | A`. + +The compiler will assign a union type to an expression only if such a +type is explicitly given. This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: + +```scala +scala> val password = Password(123) +val password: Password = Password(123) + +scala> val name = UserName("Eve") +val name: UserName = UserName(Eve) + +scala> if true then name else password +val res2: Object = UserName(Eve) + +scala> val either: Password | UserName = if true then name else password +val either: Password | UserName = UserName(Eve) +``` + +The type of `res2` is `Object & Product`, which is a supertype of +`UserName` and `Password`, but not the least supertype `Password | +UserName`. If we want the least supertype, we have to give it +explicitly, as is done for the type of `either`. 
+ +[More details](./union-types-spec.md) diff --git a/docs/_spec/TODOreference/other-new-features/control-syntax.md b/docs/_spec/TODOreference/other-new-features/control-syntax.md new file mode 100644 index 000000000000..92204690f0b7 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/control-syntax.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: New Control Syntax +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/control-syntax.html +--- + +Scala 3 has a new "quiet" syntax for control expressions that does not rely on +enclosing the condition in parentheses, and also allows to drop parentheses or braces +around the generators of a `for`-expression. Examples: +```scala +if x < 0 then + "negative" +else if x == 0 then + "zero" +else + "positive" + +if x < 0 then -x else x + +while x >= 0 do x = f(x) + +for x <- xs if x > 0 +yield x * x + +for + x <- xs + y <- ys +do + println(x + y) + +try body +catch case ex: IOException => handle +``` + +The rules in detail are: + + - The condition of an `if`-expression can be written without enclosing parentheses if it is followed by a `then`. + - The condition of a `while`-loop can be written without enclosing parentheses if it is followed by a `do`. + - The enumerators of a `for`-expression can be written without enclosing parentheses or braces if they are followed by a `yield` or `do`. + - A `do` in a `for`-expression expresses a `for`-loop. + - A `catch` can be followed by a single case on the same line. + If there are multiple cases, these have to appear within braces (just like in Scala 2) + or an indented block. +## Rewrites + +The Scala 3 compiler can rewrite source code from old syntax to new syntax and back. +When invoked with options `-rewrite -new-syntax` it will rewrite from old to new syntax, dropping parentheses and braces in conditions and enumerators. 
When invoked with options `-rewrite -old-syntax` it will rewrite in the reverse direction, inserting parentheses and braces as needed. diff --git a/docs/_spec/TODOreference/other-new-features/creator-applications.md b/docs/_spec/TODOreference/other-new-features/creator-applications.md new file mode 100644 index 000000000000..81f09d897955 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/creator-applications.md @@ -0,0 +1,57 @@ +--- +layout: doc-page +title: "Universal Apply Methods" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/creator-applications.html +--- + +Scala case classes generate apply methods, so that values of case classes can be created using simple +function application, without needing to write `new`. + +Scala 3 generalizes this scheme to all concrete classes. Example: + +```scala +class StringBuilder(s: String): + def this() = this("") + +StringBuilder("abc") // old: new StringBuilder("abc") +StringBuilder() // old: new StringBuilder() +``` + +This works since a companion object with two `apply` methods +is generated together with the class. The object looks like this: + +```scala +object StringBuilder: + inline def apply(s: String): StringBuilder = new StringBuilder(s) + inline def apply(): StringBuilder = new StringBuilder() +``` + +The synthetic object `StringBuilder` and its `apply` methods are called _constructor proxies_. +Constructor proxies are generated even for Java classes and classes coming from Scala 2. +The precise rules are as follows: + + 1. A constructor proxy companion object `object C` is created for a concrete class `C`, + provided the class does not have already a companion, and there is also no other value + or method named `C` defined or inherited in the scope where `C` is defined. + + 2. 
Constructor proxy `apply` methods are generated for a concrete class provided + + - the class has a companion object (which might have been generated in step 1), and + - that companion object does not already define a member named `apply`. + + Each generated `apply` method forwards to one constructor of the class. It has the + same type and value parameters as the constructor. + +Constructor proxy companions cannot be used as values by themselves. A proxy companion object must +be selected with `apply` (or be applied to arguments, in which case the `apply` is implicitly +inserted). + +Constructor proxies are also not allowed to shadow normal definitions. That is, +if an identifier resolves to a constructor proxy, and the same identifier is also +defined or imported in some other scope, an ambiguity is reported. + +## Motivation + +Leaving out `new` hides an implementation detail and makes code more pleasant to read. Even though +it requires a new rule, it will likely increase the perceived regularity of the language, since case +classes already provide function call creation syntax (and are often defined for this reason alone). diff --git a/docs/_spec/TODOreference/other-new-features/experimental-defs.md b/docs/_spec/TODOreference/other-new-features/experimental-defs.md new file mode 100644 index 000000000000..225b61161652 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/experimental-defs.md @@ -0,0 +1,318 @@ +--- +layout: doc-page +title: "Experimental Definitions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/experimental-defs.html +--- + +The [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) annotation allows the definition of an API that is not guaranteed backward binary or source compatibility. +This annotation can be placed on term or type definitions. + +## References to experimental definitions + +Experimental definitions can only be referenced in an experimental scope. 
Experimental scopes are defined as follows: + +1. The RHS of an experimental `def`, `val`, `var`, `given` or `type` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental + def x = () + + def d1 = x // error: value x is marked @experimental and therefore ... + @experimental def d2 = x + + val v1 = x // error: value x is marked @experimental and therefore ... + @experimental val v2 = x + + var vr1 = x // error: value x is marked @experimental and therefore ... + @experimental var vr2 = x + + lazy val lv1 = x // error: value x is marked @experimental and therefore ... + @experimental lazy val lv2 = x + ``` +
+ +
+ Example 2 + + ```scala + import scala.annotation.experimental + + @experimental + val x = () + + @experimental + def f() = () + + @experimental + object X: + def fx() = 1 + + def test1: Unit = + f() // error: def f is marked @experimental and therefore ... + x // error: value x is marked @experimental and therefore ... + X.fx() // error: object X is marked @experimental and therefore ... + import X.fx + fx() // error: object X is marked @experimental and therefore ... + + @experimental + def test2: Unit = + // references to f, x and X are ok because `test2` is experimental + f() + x + X.fx() + import X.fx + fx() + ``` +
+ +
+ Example 3 + + ```scala + import scala.annotation.experimental + + @experimental type E + + type A = E // error type E is marked @experimental and therefore ... + @experimental type B = E + ``` +
+ +
+ Example 4 + + ```scala + import scala.annotation.experimental + + @experimental class A + @experimental type X + @experimental type Y = Int + @experimental opaque type Z = Int + + def test: Unit = + new A // error: class A is marked @experimental and therefore ... + val i0: A = ??? // error: class A is marked @experimental and therefore ... + val i1: X = ??? // error: type X is marked @experimental and therefore ... + val i2: Y = ??? // error: type Y is marked @experimental and therefore ... + val i2: Z = ??? // error: type Y is marked @experimental and therefore ... + () + ``` +
+ +
+ Example 5 + + ```scala + @experimental + trait ExpSAM { + def foo(x: Int): Int + } + def bar(f: ExpSAM): Unit = {} // error: error form rule 2 + + def test: Unit = + bar(x => x) // error: reference to experimental SAM + () + ``` +
+ +2. The signatures of an experimental `def`, `val`, `var`, `given` and `type`, or constructors of `class` and `trait` are experimental scopes. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + @experimental class A + @experimental type X + @experimental type Y = Int + @experimental opaque type Z = Int + + def test1( + p1: A, // error: class A is marked @experimental and therefore ... + p2: List[A], // error: class A is marked @experimental and therefore ... + p3: X, // error: type X is marked @experimental and therefore ... + p4: Y, // error: type Y is marked @experimental and therefore ... + p5: Z, // error: type Z is marked @experimental and therefore ... + p6: Any = x // error: def x is marked @experimental and therefore ... + ): A = ??? // error: class A is marked @experimental and therefore ... + + @experimental def test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ): A = ??? + + class Test1( + p1: A, // error + p2: List[A], // error + p3: X, // error + p4: Y, // error + p5: Z, // error + p6: Any = x // error + ) {} + + @experimental class Test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ) {} + + trait Test1( + p1: A, // error + p2: List[A], // error + p3: X, // error + p4: Y, // error + p5: Z, // error + p6: Any = x // error + ) {} + + @experimental trait Test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ) {} + ``` +
+ +3. The `extends` clause of an experimental `class`, `trait` or `object` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + + @experimental class A1(x: Any) + class A2(x: Any) + + + @experimental class B1 extends A1(1) + class B2 extends A1(1) // error: class A1 is marked @experimental and therefore marked @experimental and therefore ... + + @experimental class C1 extends A2(x) + class C2 extends A2(x) // error def x is marked @experimental and therefore + ``` +
+ +4. The body of an experimental `class`, `trait` or `object` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + + @experimental class A { + def f = x // ok because A is experimental + } + + @experimental class B { + def f = x // ok because A is experimental + } + + @experimental object C { + def f = x // ok because A is experimental + } + + @experimental class D { + def f = { + object B { + x // ok because A is experimental + } + } + } + ``` + +
+ +5. Annotations of an experimental definition are in experimental scopes. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental class myExperimentalAnnot extends scala.annotation.Annotation + + @myExperimentalAnnot // error + def test: Unit = () + + @experimental + @myExperimentalAnnot + def test: Unit = () + ``` + +
+ +6. Any code compiled using a [_Nightly_](https://search.maven.org/artifact/org.scala-lang/scala3-compiler_3) or _Snapshot_ version of the compiler is considered to be in an experimental scope. +Can use the `-Yno-experimental` compiler flag to disable it and run as a proper release. + +In any other situation, a reference to an experimental definition will cause a compilation error. + +## Experimental inheritance + +All subclasses of an experimental `class` or `trait` must be marked as [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) even if they are in an experimental scope. +Anonymous classes and SAMs of experimental classes are considered experimental. + +We require explicit annotations to make sure we do not have completion or cycles issues with nested classes. This restriction could be relaxed in the future. + +## Experimental overriding + +For an overriding member `M` and overridden member `O`, if `O` is non-experimental then `M` must be non-experimental. + +This makes sure that we cannot have accidental binary incompatibilities such as the following change. +```diff +class A: + def f: Any = 1 +class B extends A: +- @experimental def f: Int = 2 +``` + +## Test frameworks + +Tests can be defined as experimental. Tests frameworks can execute tests using reflection even if they are in an experimental class, object or method. Examples: + +
+Example 1 + +Test that touch experimental APIs can be written as follows + +```scala +import scala.annotation.experimental + +@experimental def x = 2 + +class MyTests { + /*@Test*/ def test1 = x // error + @experimental /*@Test*/ def test2 = x +} + +@experimental +class MyExperimentalTests { + /*@Test*/ def test1 = x + /*@Test*/ def test2 = x +} +``` + +
diff --git a/docs/_spec/TODOreference/other-new-features/export.md b/docs/_spec/TODOreference/other-new-features/export.md new file mode 100644 index 000000000000..40e2ad9df248 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/export.md @@ -0,0 +1,234 @@ +--- +layout: doc-page +title: "Export Clauses" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/export.html +--- + +An export clause defines aliases for selected members of an object. Example: + +```scala +class BitMap +class InkJet + +class Printer: + type PrinterType + def print(bits: BitMap): Unit = ??? + def status: List[String] = ??? + +class Scanner: + def scan(): BitMap = ??? + def status: List[String] = ??? + +class Copier: + private val printUnit = new Printer { type PrinterType = InkJet } + private val scanUnit = new Scanner + + export scanUnit.scan + export printUnit.{status as _, *} + + def status: List[String] = printUnit.status ++ scanUnit.status +``` + +The two `export` clauses define the following _export aliases_ in class `Copier`: + +```scala +final def scan(): BitMap = scanUnit.scan() +final def print(bits: BitMap): Unit = printUnit.print(bits) +final type PrinterType = printUnit.PrinterType +``` + +They can be accessed inside `Copier` as well as from outside: + +```scala +val copier = new Copier +copier.print(copier.scan()) +``` + +An `export` clause has the same format as an import clause. Its general form is: + +```scala +export path . { sel_1, ..., sel_n } +``` + +It consists of a qualifier expression `path`, which must be a stable identifier, followed by +one or more selectors `sel_i` that identify what gets an alias. Selectors can be +of one of the following forms: + + - A _simple selector_ `x` creates aliases for all eligible members of `path` that are named `x`. + - A _renaming selector_ `x as y` creates aliases for all eligible members of `path` that are named `x`, but the alias is named `y` instead of `x`. 
+ - An _omitting selector_ `x as _` prevents `x` from being aliased by a subsequent + wildcard selector. + - A _given selector_ `given x` has an optional type bound `x`. It creates aliases for all eligible given instances that conform to either `x`, or `Any` if `x` is omitted, except for members that are named by a previous simple, renaming, or omitting selector. + - A _wildcard selector_ `*` creates aliases for all eligible members of `path` except for given instances, + synthetic members generated by the compiler and those members that are named by a previous simple, renaming, or omitting selector. + \ + Notes: + - eligible construtor proxies are also included, even though they are synthetic members. + - members created by an export are also included. They are created by the compiler, but are not considered synthetic. + +A member is _eligible_ if all of the following holds: + + - its owner is not a base class of the class[(\*)](#note_class) containing the export clause, + - the member does not override a concrete definition that has as owner + a base class of the class containing the export clause. + - it is accessible at the export clause, + - it is not a constructor, nor the (synthetic) class part of an object, + - it is a given instance (declared with `given`) if and only if the export is from a _given selector_. + +It is a compile-time error if a simple or renaming selector does not identify +any eligible members. + +It is a compile-time error if a simple or renaming selector does not identify any eligible members. + +Type members are aliased by type definitions, and term members are aliased by method definitions. For instance: +```scala +object O: + class C(val x: Int) + def m(c: C): Int = c.x + 1 +export O.* + // generates + // type C = O.C + // def m(c: O.C): Int = O.m(c) +``` + +Export aliases copy the type and value parameters of the members they refer to. +Export aliases are always `final`. 
Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: + + - Export aliases cannot be overridden, since they are final. + - Export aliases cannot override concrete members in base classes, since they are + not marked `override`. + - However, export aliases can implement deferred members of base classes. + +Export aliases for public value definitions that are accessed without +referring to private values in the qualifier path +are marked by the compiler as "stable" and their result types are the singleton types of the aliased definitions. This means that they can be used as parts of stable identifier paths, even though they are technically methods. For instance, the following is OK: +```scala +class C { type T } +object O { val c: C = ... } +export O.c +def f: c.T = ... +``` + + +**Restrictions:** + + 1. Export clauses can appear in classes or they can appear at the top-level. An export clause cannot appear as a statement in a block. + 1. If an export clause contains a wildcard or given selector, it is forbidden for its qualifier path to refer to a package. This is because it is not yet known how to safely track wildcard dependencies to a package for the purposes of incremental compilation. + 1. An export renaming hides un-renamed exports matching the target name. For instance, the following + clause would be invalid since `B` is hidden by the renaming `A as B`. + ```scala + export {A as B, B} // error: B is hidden + ``` + + 1. Renamings in an export clause must have pairwise different target names. For instance, the following clause would be invalid: + ```scala + export {A as C, B as C} // error: duplicate renaming + + 1. 
Simple renaming exports like + ```scala + export status as stat + ``` + are not supported yet. They would run afoul of the restriction that the + exported `a` cannot be already a member of the object containing the export. + This restriction might be lifted in the future. + + +(\*) **Note:** Unless otherwise stated, the term "class" in this discussion also includes object and trait definitions. + +## Motivation + +It is a standard recommendation to prefer composition over inheritance. This is really an application of the principle of least power: Composition treats components as blackboxes whereas inheritance can affect the internal workings of components through overriding. Sometimes the close coupling implied by inheritance is the best solution for a problem, but where this is not necessary the looser coupling of composition is better. + +So far, object-oriented languages including Scala made it much easier to use inheritance than composition. Inheritance only requires an `extends` clause whereas composition required a verbose elaboration of a sequence of forwarders. So in that sense, object-oriented languages are pushing +programmers to a solution that is often too powerful. Export clauses redress the balance. They make composition relationships as concise and easy to express as inheritance relationships. Export clauses also offer more flexibility than extends clauses since members can be renamed or omitted. + +Export clauses also fill a gap opened by the shift from package objects to top-level definitions. One occasionally useful idiom that gets lost in this shift is a package object inheriting from some class. The idiom is often used in a facade like pattern, to make members +of internal compositions available to users of a package. Top-level definitions are not wrapped in a user-defined object, so they can't inherit anything. However, top-level definitions can be export clauses, which supports the facade design pattern in a safer and +more flexible way. 
+ +## Export Clauses in Extensions + +An export clause may also appear in an extension. + +Example: +```scala +class StringOps(x: String): + def *(n: Int): String = ... + def capitalize: String = ... + +extension (x: String) + def take(n: Int): String = x.substring(0, n) + def drop(n: Int): String = x.substring(n) + private def moreOps = new StringOps(x) + export moreOps.* +``` +In this case the qualifier expression must be an identifier that refers to a unique parameterless extension method in the same extension clause. The export will create +extension methods for all accessible term members +in the result of the qualifier path. For instance, the extension above would be expanded to +```scala +extension (x: String) + def take(n: Int): String = x.substring(0, n) + def drop(n: Int): String = x.substring(n) + private def moreOps = StringOps(x) + def *(n: Int): String = moreOps.*(n) + def capitalize: String = moreOps.capitalize +``` + +## Syntax changes: + +``` +TemplateStat ::= ... + | Export +TopStat ::= ... + | Export +ExtMethod ::= ... + | Export +Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec +ImportSpec ::= NamedSelector + | WildcardSelector + | ‘{’ ImportSelectors) ‘}’ +NamedSelector ::= id [‘as’ (id | ‘_’)] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] +ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +``` + +## Elaboration of Export Clauses + +Export clauses raise questions about the order of elaboration during type checking. +Consider the following example: + +```scala +class B { val c: Int } +object a { val b = new B } +export a.* +export b.* +``` + +Is the `export b.*` clause legal? If yes, what does it export? Is it equivalent to `export a.b.*`? What about if we swap the last two clauses? + +``` +export b.* +export a.* +``` + +To avoid tricky questions like these, we fix the elaboration order of exports as follows. 
+ +Export clauses are processed when the type information of the enclosing object or class is completed. Completion so far consisted of the following steps: + + 1. Elaborate any annotations of the class. + 2. Elaborate the parameters of the class. + 3. Elaborate the self type of the class, if one is given. + 4. Enter all definitions of the class as class members, with types to be completed + on demand. + 5. Determine the types of all parents of the class. + + With export clauses, the following steps are added: + + 6. Compute the types of all paths in export clauses. + 7. Enter export aliases for the eligible members of all paths in export clauses. + +It is important that steps 6 and 7 are done in sequence: We first compute the types of _all_ +paths in export clauses and only after this is done we enter any export aliases as class members. This means that a path of an export clause cannot refer to an alias made available +by another export clause of the same class. diff --git a/docs/_spec/TODOreference/other-new-features/indentation.md b/docs/_spec/TODOreference/other-new-features/indentation.md new file mode 100644 index 000000000000..e931030ab696 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/indentation.md @@ -0,0 +1,509 @@ +--- +layout: doc-page +title: "Optional Braces" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html +--- + +Scala 3 enforces some rules on indentation and allows some occurrences of braces `{...}` to be optional: + +- First, some badly indented programs are flagged with warnings. +- Second, some occurrences of braces `{...}` are made optional. Generally, the rule + is that adding a pair of optional braces will not change the meaning of a well-indented program. + +These changes can be turned off with the compiler flag `-no-indent`. + +## Indentation Rules + +The compiler enforces two rules for well-indented programs, flagging violations as warnings. + + 1. 
In a brace-delimited region, no statement is allowed to start to the left
    of the first statement after the opening brace that starts a new line.

    This rule is helpful for finding missing closing braces. It prevents errors like:

    ```scala
    if (x < 0) {
      println(1)
      println(2)

    println("done")  // error: indented too far to the left
    ```

 2. If significant indentation is turned off (i.e. under Scala 2 mode or under `-no-indent`) and we are at the start of an indented sub-part of an expression, and the indented part ends in a newline, the next statement must start at an indentation width less than the sub-part. This prevents errors where an opening brace was forgotten, as in

    ```scala
    if (x < 0)
      println(1)
      println(2)   // error: missing `{`
    ```

These rules still leave a lot of leeway how programs should be indented. For instance, they do not impose
any restrictions on indentation within expressions, nor do they require that all statements of an indentation block line up exactly.

The rules are generally helpful in pinpointing the root cause of errors related to missing opening or closing braces. These errors are often quite hard to diagnose, in particular in large programs.

## Optional Braces

The compiler will insert `<indent>` or `<outdent>`
tokens at certain line breaks. Grammatically, pairs of `<indent>` and `<outdent>` tokens have the same effect as pairs of braces `{` and `}`.

The algorithm makes use of a stack `IW` of previously encountered indentation widths. The stack initially holds a single element with a zero indentation width. The _current indentation width_ is the indentation width of the top of the stack.

There are two rules:

 1.
An `<indent>` is inserted at a line break, if

     - An indentation region can start at the current position in the source, and
     - the first token on the next line has an indentation width strictly greater
       than the current indentation width

    An indentation region can start

     - after the leading parameters of an `extension`, or
     - after a `with` in a given instance, or
     - after a `:` at the start of a template body (see discussion of `<colon>` below), or
     - after one of the following tokens:

       ```
       =  =>  ?=>  <-  catch  do  else  finally  for
       if  match  return  then  throw  try  while  yield
       ```

     - after the closing `)` of a condition in an old-style `if` or `while`.
     - after the closing `)` or `}` of the enumerations of an old-style `for` loop without a `do`.

    If an `<indent>` is inserted, the indentation width of the token on the next line
    is pushed onto `IW`, which makes it the new current indentation width.

 2. An `<outdent>` is inserted at a line break, if

     - the first token on the next line has an indentation width strictly less
       than the current indentation width, and
     - the last token on the previous line is not one of the following tokens
       which indicate that the previous statement continues:
       ```
       then  else  do  catch  finally  yield  match
       ```
     - if the first token on the next line is a
       [leading infix operator](../changed-features/operators.md),
       then its indentation width is less than the current indentation width,
       and it either matches a previous indentation width or is also less
       than the enclosing indentation width.

    If an `<outdent>` is inserted, the top element is popped from `IW`.
    If the indentation width of the token on the next line is still less than the new current indentation width, step (2) repeats. Therefore, several `<outdent>` tokens
    may be inserted in a row.

    The following two additional rules support parsing of legacy code with ad-hoc layout.
They might be withdrawn in future language versions:

 - An `<outdent>` is also inserted if the next token following a statement sequence starting with an `<indent>` closes an indentation region, i.e. is one of `then`, `else`, `do`, `catch`, `finally`, `yield`, `}`, `)`, `]` or `case`.

 - An `<outdent>` is finally inserted in front of a comma that follows a statement sequence starting with an `<indent>` if the indented region is itself enclosed in parentheses.

It is an error if the indentation width of the token following an `<outdent>` does not match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected.

```scala
if x < 0 then
    -x
  else   // error: `else` does not align correctly
    x
```

Indentation tokens are only inserted in regions where newline statement separators are also inferred:
at the top-level, inside braces `{...}`, but not inside parentheses `(...)`, patterns or types.

**Note:** The rules for leading infix operators above are there to make sure that
```scala
  one
  + two.match
      case 1 => b
      case 2 => c
  + three
```
is parsed as `one + (two.match ...) + three`. Also, that
```scala
if x then
    a
  + b
  + c
else d
```
is parsed as `if x then a + b + c else d`.

## Optional Braces Around Template Bodies

The Scala grammar uses the term _template body_ for the definitions of a class, trait, or object that are normally enclosed in braces. The braces around a template body can also be omitted by means of the following rule.

A template body can alternatively consist of a colon followed by one or more indented statements. To this purpose we introduce a new `<colon>` token that reads as
the standard colon "`:`" but is generated instead of it where `<colon>`
is legal according to the context free syntax, but only if the previous token
is an alphanumeric identifier, a backticked identifier, or one of the tokens `this`, `super`, "`)`", and "`]`".

An indentation region can start after a `<colon>`.
A template body may be either enclosed in braces, or it may start with
`<colon> <indent>` and end with `<outdent>`.
Analogous rules apply for enum bodies, type refinements, and local packages containing nested definitions.

With these new rules, the following constructs are all valid:

```scala
trait A:
  def f: Int

class C(x: Int) extends A:
  def f = x

object O:
  def f = 3

enum Color:
  case Red, Green, Blue

new A:
  def f = 3

package p:
  def a = 1

package q:
  def b = 2
```

In each case, the `:` at the end of line can be replaced without change of meaning by a pair of braces that enclose the following indented definition(s).

The syntax changes allowing this are as follows:

Define for an arbitrary sequence of tokens or non-terminals `TS`:

```
:<<< TS >>>   ::=   ‘{’ TS ‘}’
                |   <colon> <indent> TS <outdent>
```
Then the grammar changes as follows:
```
TemplateBody    ::=  :<<< [SelfType] TemplateStat {semi TemplateStat} >>>
EnumBody        ::=  :<<< [SelfType] EnumStat {semi EnumStat} >>>
Refinement      ::=  :<<< [RefineDcl] {semi [RefineDcl]} >>>
Packaging       ::=  ‘package’ QualId :<<< TopStats >>>
```

## Spaces vs Tabs

Indentation prefixes can consist of spaces and/or tabs. Indentation widths are the indentation prefixes themselves, ordered by the string prefix relation. So, for instance "2 tabs, followed by 4 spaces" is strictly less than "2 tabs, followed by 5 spaces", but "2 tabs, followed by 4 spaces" is incomparable to "6 tabs" or to "4 spaces, followed by 2 tabs". It is an error if the indentation width of some line is incomparable with the indentation width of the region that's current at that point. To avoid such errors, it is a good idea not to mix spaces and tabs in the same source file.

## Indentation and Braces

Indentation can be mixed freely with braces `{...}`, as well as brackets `[...]` and parentheses `(...)`. For interpreting indentation inside such regions, the following rules apply.

 1.
The assumed indentation width of a multiline region enclosed in braces is the + indentation width of the first token that starts a new line after the opening brace. + + 2. The assumed indentation width of a multiline region inside brackets or parentheses is: + + - if the opening bracket or parenthesis is at the end of a line, the indentation width of token following it, + - otherwise, the indentation width of the enclosing region. + + 3. On encountering a closing brace `}`, bracket `]` or parenthesis `)`, as many `` tokens as necessary are inserted to close all open nested indentation regions. + +For instance, consider: +```scala +{ + val x = f(x: Int, y => + x * ( + y + 1 + ) + + (x + + x) + ) +} +``` + - Here, the indentation width of the region enclosed by the braces is 3 (i.e. the indentation width of the +statement starting with `val`). + - The indentation width of the region in parentheses that follows `f` is also 3, since the opening + parenthesis is not at the end of a line. + - The indentation width of the region in parentheses around `y + 1` is 9 + (i.e. the indentation width of `y + 1`). + - Finally, the indentation width of the last region in parentheses starting with `(x` is 6 (i.e. the indentation width of the indented region following the `=>`. + +## Special Treatment of Case Clauses + +The indentation rules for `match` expressions and `catch` clauses are refined as follows: + +- An indentation region is opened after a `match` or `catch` also if the following `case` + appears at the indentation width that's current for the `match` itself. +- In that case, the indentation region closes at the first token at that + same indentation width that is not a `case`, or at any token with a smaller + indentation width, whichever comes first. 
+ +The rules allow to write `match` expressions where cases are not indented themselves, as in the example below: + +```scala +x match +case 1 => print("I") +case 2 => print("II") +case 3 => print("III") +case 4 => print("IV") +case 5 => print("V") + +println(".") +``` + +## Using Indentation to Signal Statement Continuation + +Indentation is used in some situations to decide whether to insert a virtual semicolon between +two consecutive lines or to treat them as one statement. Virtual semicolon insertion is +suppressed if the second line is indented more relative to the first one, and either the second line +starts with "`(`", "`[`", or "`{`" or the first line ends with `return`. Examples: + +```scala +f(x + 1) + (2, 3) // equivalent to `f(x + 1)(2, 3)` + +g(x + 1) +(2, 3) // equivalent to `g(x + 1); (2, 3)` + +h(x + 1) + {} // equivalent to `h(x + 1){}` + +i(x + 1) +{} // equivalent to `i(x + 1); {}` + +if x < 0 then return + a + b // equivalent to `if x < 0 then return a + b` + +if x < 0 then return +println(a + b) // equivalent to `if x < 0 then return; println(a + b)` +``` +In Scala 2, a line starting with "`{`" always continues the function call on the preceding line, +irrespective of indentation, whereas a virtual semicolon is inserted in all other cases. +The Scala-2 behavior is retained under source `-no-indent` or `-source 3.0-migration`. + + + +## The End Marker + +Indentation-based syntax has many advantages over other conventions. But one possible problem is that it makes it hard to discern when a large indentation region ends, since there is no specific token that delineates the end. Braces are not much better since a brace by itself also contains no information about what region is closed. + +To solve this problem, Scala 3 offers an optional `end` marker. Example: + +```scala +def largeMethod(...) = + ... + if ... then ... + else + ... // a large block + end if + ... 
// more code +end largeMethod +``` + +An `end` marker consists of the identifier `end` and a follow-on specifier token that together constitute all the tokes of a line. Possible specifier tokens are +identifiers or one of the following keywords + +```scala +if while for match try new this val given +``` + +End markers are allowed in statement sequences. The specifier token `s` of an end marker must correspond to the statement that precedes it. This means: + +- If the statement defines a member `x` then `s` must be the same identifier `x`. +- If the statement defines a constructor then `s` must be `this`. +- If the statement defines an anonymous given, then `s` must be `given`. +- If the statement defines an anonymous extension, then `s` must be `extension`. +- If the statement defines an anonymous class, then `s` must be `new`. +- If the statement is a `val` definition binding a pattern, then `s` must be `val`. +- If the statement is a package clause that refers to package `p`, then `s` must be the same identifier `p`. +- If the statement is an `if`, `while`, `for`, `try`, or `match` statement, then `s` must be that same token. + +For instance, the following end markers are all legal: + +```scala +package p1.p2: + + abstract class C(): + + def this(x: Int) = + this() + if x > 0 then + val a :: b = + x :: Nil + end val + var y = + x + end y + while y > 0 do + println(y) + y -= 1 + end while + try + x match + case 0 => println("0") + case _ => + end match + finally + println("done") + end try + end if + end this + + def f: String + end C + + object C: + given C = + new C: + def f = "!" + end f + end new + end given + end C + + extension (x: C) + def ff: String = x.f ++ x.f + end extension + +end p2 +``` + +### When to Use End Markers + +It is recommended that `end` markers are used for code where the extent of an indentation region is not immediately apparent "at a glance". 
People will have different preferences what this means, but one can nevertheless give some guidelines that stem from experience. An end marker makes sense if + +- the construct contains blank lines, or +- the construct is long, say 15-20 lines or more, +- the construct ends heavily indented, say 4 indentation levels or more. + +If none of these criteria apply, it's often better to not use an end marker since the code will be just as clear and more concise. If there are several ending regions that satisfy one of the criteria above, we usually need an end marker only for the outermost closed region. So cascades of end markers as in the example above are usually better avoided. + +### Syntax + +``` +EndMarker ::= ‘end’ EndMarkerTag -- when followed by EOL +EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’ + | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’ +BlockStat ::= ... | EndMarker +TemplateStat ::= ... | EndMarker +TopStat ::= ... | EndMarker +``` + +## Example + +Here is a (somewhat meta-circular) example of code using indentation. It provides a concrete representation of indentation widths as defined above together with efficient operations for constructing and comparing indentation widths. 
+ +```scala +enum IndentWidth: + case Run(ch: Char, n: Int) + case Conc(l: IndentWidth, r: Run) + + def <= (that: IndentWidth): Boolean = this match + case Run(ch1, n1) => + that match + case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0) + case Conc(l, r) => this <= l + case Conc(l1, r1) => + that match + case Conc(l2, r2) => l1 == l2 && r1 <= r2 + case _ => false + + def < (that: IndentWidth): Boolean = + this <= that && !(that <= this) + + override def toString: String = + this match + case Run(ch, n) => + val kind = ch match + case ' ' => "space" + case '\t' => "tab" + case _ => s"'$ch'-character" + val suffix = if n == 1 then "" else "s" + s"$n $kind$suffix" + case Conc(l, r) => + s"$l, $r" + +object IndentWidth: + private inline val MaxCached = 40 + + private val spaces = IArray.tabulate(MaxCached + 1)(new Run(' ', _)) + private val tabs = IArray.tabulate(MaxCached + 1)(new Run('\t', _)) + + def Run(ch: Char, n: Int): Run = + if n <= MaxCached && ch == ' ' then + spaces(n) + else if n <= MaxCached && ch == '\t' then + tabs(n) + else + new Run(ch, n) + end Run + + val Zero = Run(' ', 0) +end IndentWidth +``` + +## Settings and Rewrites + +Significant indentation is enabled by default. It can be turned off by giving any of the options `-no-indent`, `-old-syntax` and `-source 3.0-migration`. If indentation is turned off, it is nevertheless checked that indentation conforms to the logical program structure as defined by braces. If that is not the case, the compiler issues a warning. + +The Scala 3 compiler can rewrite source code to indented code and back. +When invoked with options `-rewrite -indent` it will rewrite braces to +indented regions where possible. When invoked with options `-rewrite -no-indent` it will rewrite in the reverse direction, inserting braces for indentation regions. +The `-indent` option only works on [new-style syntax](./control-syntax.md). 
So to go from old-style syntax to new-style indented code one has to invoke the compiler twice, first with options `-rewrite -new-syntax`, then again with options +`-rewrite -indent`. To go in the opposite direction, from indented code to old-style syntax, it's `-rewrite -no-indent`, followed by `-rewrite -old-syntax`. + +## Variant: Indentation Marker `:` for Arguments + +Generally, the possible indentation regions coincide with those regions where braces `{...}` are also legal, no matter whether the braces enclose an expression or a set of definitions. There is one exception, though: Arguments to functions can be enclosed in braces but they cannot be simply indented instead. Making indentation always significant for function arguments would be too restrictive and fragile. + +To allow such arguments to be written without braces, a variant of the indentation scheme is implemented under language import +```scala +import language.experimental.fewerBraces +``` +In this variant, a `` token is also recognized where function argument would be expected. Examples: + +```scala +times(10): + println("ah") + println("ha") +``` + +or + +```scala +credentials `++`: + val file = Path.userHome / ".credentials" + if file.exists + then Seq(Credentials(file)) + else Seq() +``` + +or + +```scala +xs.map: + x => + val y = x - 1 + y * y +``` +What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this: + +```scala +xs.map: x => + val y = x - 1 + y * y +``` +and the following would also be legal: +```scala +xs.foldLeft(0): (x, y) => + x + y +``` + +The grammar changes for this variant are as follows. + +``` +SimpleExpr ::= ... + | SimpleExpr ColonArgument +InfixExpr ::= ... 
+ | InfixExpr id ColonArgument +ColonArgument ::= colon [LambdaStart] + indent (CaseClauses | Block) outdent +LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + | HkTypeParamClause ‘=>’ +``` \ No newline at end of file diff --git a/docs/_spec/TODOreference/other-new-features/kind-polymorphism.md b/docs/_spec/TODOreference/other-new-features/kind-polymorphism.md new file mode 100644 index 000000000000..8f0172c4c04b --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/kind-polymorphism.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: "Kind Polymorphism" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html +--- + +Normally type parameters in Scala are partitioned into _kinds_. First-level types are types of values. Higher-kinded types are type constructors +such as `List` or `Map`. The kind of a type is indicated by the top type of which it is a subtype. Normal types are subtypes of `Any`, +covariant single argument type constructors such as `List` are subtypes of `[+X] =>> Any`, and the `Map` type constructor is +a subtype of `[X, +Y] =>> Any`. + +A type can be used only as prescribed by its kind. Subtypes of `Any` cannot be applied to type arguments whereas subtypes of `[X] =>> Any` +_must_ be applied to a type argument, unless they are passed to type parameters of the same kind. + +Sometimes we would like to have type parameters that can have more than one kind, for instance to define an implicit +value that works for parameters of any kind. This is now possible through a form of (_subtype_) kind polymorphism. +Kind polymorphism relies on the special type [`scala.AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) that can be used as an upper bound of a type. + +```scala +def f[T <: AnyKind] = ... +``` + +The actual type arguments of `f` can then be types of arbitrary kinds. 
So the following would all be legal: + +```scala +f[Int] +f[List] +f[Map] +f[[X] =>> String] +``` + +We call type parameters and abstract types with an `AnyKind` upper bound _any-kinded types_. +Since the actual kind of an any-kinded type is unknown, its usage must be heavily restricted: An any-kinded type +can be neither the type of a value, nor can it be instantiated with type parameters. So about the only +thing one can do with an any-kinded type is to pass it to another any-kinded type argument. +Nevertheless, this is enough to achieve some interesting generalizations that work across kinds, typically +through advanced uses of implicits. + +(todo: insert good concise example) + +Some technical details: [`AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) is a synthesized class just like `Any`, but without any members. It extends no other class. +It is declared `abstract` and `final`, so it can be neither instantiated nor extended. + +`AnyKind` plays a special role in Scala's subtype system: It is a supertype of all other types no matter what their kind is. It is also assumed to be kind-compatible with all other types. Furthermore, `AnyKind` is treated as a higher-kinded type (so it cannot be used as a type of values), but at the same time it has no type parameters (so it cannot be instantiated). + +**Note**: This feature is considered experimental but stable and it can be disabled under compiler flag +(i.e. `-Yno-kind-polymorphism`). 
diff --git a/docs/_spec/TODOreference/other-new-features/matchable.md b/docs/_spec/TODOreference/other-new-features/matchable.md new file mode 100644 index 000000000000..234fdf03220c --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/matchable.md @@ -0,0 +1,141 @@ +--- +layout: doc-page +title: "The Matchable Trait" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html +--- + +A new trait [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) controls the ability to pattern match. + +## The Problem + +The Scala 3 standard library has a type [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0) for immutable +arrays that is defined like this: + +```scala + opaque type IArray[+T] = Array[_ <: T] +``` + +The `IArray` type offers extension methods for `length` and `apply`, but not for `update`; hence it seems values of type `IArray` cannot be updated. + +However, there is a potential hole due to pattern matching. Consider: + +```scala +val imm: IArray[Int] = ... +imm match + case a: Array[Int] => a(0) = 1 +``` + +The test will succeed at runtime since [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0)s _are_ represented as +`Array`s at runtime. But if we allowed it, it would break the fundamental abstraction of immutable arrays. + +__Aside:__ One could also achieve the same by casting: + +```scala +imm.asInstanceOf[Array[Int]](0) = 1 +``` + +But that is not as much of a problem since in Scala `asInstanceOf` is understood to be low-level and unsafe. By contrast, a pattern match that compiles without warning or error should not break abstractions. + +Note also that the problem is not tied to [opaque types](opaques.md) as match selectors. 
The following slight variant with a value of parametric +type `T` as match selector leads to the same problem: + +```scala +def f[T](x: T) = x match + case a: Array[Int] => a(0) = 0 +f(imm) +``` + +Finally, note that the problem is not linked to just [opaque types](opaques.md). No unbounded type parameter or abstract type should be decomposable with a pattern match. + +## The Solution + +There is a new type [`scala.Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) that controls pattern matching. When typing a pattern match of a constructor pattern `C(...)` or +a type pattern `_: C` it is required that the selector type conforms +to `Matchable`. If that's not the case a warning is issued. For instance when compiling the example at the start of this section we get: + +``` +> sc ../new/test.scala -source future +-- Warning: ../new/test.scala:4:12 --------------------------------------------- +4 | case a: Array[Int] => a(0) = 0 + | ^^^^^^^^^^ + | pattern selector should be an instance of Matchable, + | but it has unmatchable type IArray[Int] instead +``` + +To allow migration from Scala 2 and cross-compiling +between Scala 2 and 3 the warning is turned on only for `-source future-migration` or higher. + +[`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is a universal trait with `Any` as its parent class. It is extended by both [`AnyVal`](https://scala-lang.org/api/3.x/scala/AnyVal.html) and [`AnyRef`](https://scala-lang.org/api/3.x/scala/AnyRef.html). Since `Matchable` is a supertype of every concrete value or reference class it means that instances of such classes can be matched as before. However, match selectors of the following types will produce a warning: + +- Type `Any`: if pattern matching is required one should use `Matchable` instead. +- Unbounded type parameters and abstract types: If pattern matching is required they should have an upper bound `Matchable`. 
+- Type parameters and abstract types that are only bounded by some + universal trait: Again, `Matchable` should be added as a bound. + +Here is the hierarchy of top-level classes and traits with their defined methods: + +```scala +abstract class Any: + def getClass + def isInstanceOf + def asInstanceOf + def == + def != + def ## + def equals + def hashCode + def toString + +trait Matchable extends Any + +class AnyVal extends Any, Matchable +class Object extends Any, Matchable +``` + +[`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is currently a marker trait without any methods. Over time +we might migrate methods `getClass` and `isInstanceOf` to it, since these are closely related to pattern-matching. + +## `Matchable` and Universal Equality + +Methods that pattern-match on selectors of type `Any` will need a cast once the +Matchable warning is turned on. The most common such method is the universal +`equals` method. It will have to be written as in the following example: + +```scala +class C(val x: String): + + override def equals(that: Any): Boolean = + that.asInstanceOf[Matchable] match + case that: C => this.x == that.x + case _ => false +``` + +The cast of `that` to [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) serves as an indication that universal equality +is unsafe in the presence of abstract types and opaque types since it cannot properly distinguish the meaning of a type from its representation. The cast +is guaranteed to succeed at run-time since `Any` and [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) both erase to +`Object`. + +For instance, consider the definitions + +```scala +opaque type Meter = Double +def Meter(x: Double): Meter = x + +opaque type Second = Double +def Second(x: Double): Second = x +``` + +Here, universal `equals` will return true for + +```scala + Meter(10).equals(Second(10)) +``` + +even though this is clearly false mathematically. 
With [multiversal equality](../contextual/multiversal-equality.md) one can mitigate that problem somewhat by turning + +```scala + import scala.language.strictEquality + Meter(10) == Second(10) +``` + +into a type error. diff --git a/docs/_spec/TODOreference/other-new-features/opaques-details.md b/docs/_spec/TODOreference/other-new-features/opaques-details.md new file mode 100644 index 000000000000..d7305a249089 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/opaques-details.md @@ -0,0 +1,126 @@ +--- +layout: doc-page +title: "Opaque Type Aliases: More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaques-details.html +--- + +## Syntax + +``` +Modifier ::= ... + | ‘opaque’ +``` + +`opaque` is a [soft modifier](../soft-modifier.md). It can still be used as a normal identifier when it is not in front of a definition keyword. + +Opaque type aliases must be members of classes, traits, or objects, or they are defined +at the top-level. They cannot be defined in local blocks. + +## Type Checking + +The general form of a (monomorphic) opaque type alias is + +```scala +opaque type T >: L <: U = R +``` + +where the lower bound `L` and the upper bound `U` may be missing, in which case they are assumed to be [`scala.Nothing`](https://scala-lang.org/api/3.x/scala/Nothing.html) and [`scala.Any`](https://scala-lang.org/api/3.x/scala/Any.html), respectively. If bounds are given, it is checked that the right-hand side `R` conforms to them, i.e. `L <: R` and `R <: U`. F-bounds are not supported for opaque type aliases: `T` is not allowed to appear in `L` or `U`. + +Inside the scope of the alias definition, the alias is transparent: `T` is treated +as a normal alias of `R`. Outside its scope, the alias is treated as the abstract type +```scala +type T >: L <: U +``` +A special case arises if the opaque type alias is defined in an object. 
Example: + +```scala +object o: + opaque type T = R +``` + +In this case we have inside the object (also for non-opaque types) that `o.T` is equal to +`T` or its expanded form `o.this.T`. Equality is understood here as mutual subtyping, i.e. +`o.T <: o.this.T` and `o.this.T <: T`. Furthermore, we have by the rules of opaque type aliases +that `o.this.T` equals `R`. The two equalities compose. That is, inside `o`, it is +also known that `o.T` is equal to `R`. This means the following code type-checks: + +```scala +object o: + opaque type T = Int + val x: Int = id(2) +def id(x: o.T): o.T = x +``` + +Opaque type aliases cannot be `private` and cannot be overridden in subclasses. +Opaque type aliases cannot have a context function type as right-hand side. + +## Type Parameters of Opaque Types + +Opaque type aliases can have a single type parameter list. The following aliases +are well-formed +```scala +opaque type F[T] = (T, T) +opaque type G = [T] =>> List[T] +``` +but the following are not: +```scala +opaque type BadF[T] = [U] =>> (T, U) +opaque type BadG = [T] =>> [U] => (T, U) +``` + +## Translation of Equality + +Comparing two values of opaque type with `==` or `!=` normally uses universal equality, +unless another overloaded `==` or `!=` operator is defined for the type. To avoid +boxing, the operation is mapped after type checking to the (in-)equality operator +defined on the underlying type. For instance, +```scala + opaque type T = Int + + ... + val x: T + val y: T + x == y // uses Int equality for the comparison. +``` + +## Top-level Opaque Types + +An opaque type alias on the top-level is transparent in all other top-level definitions in the sourcefile where it appears, but is opaque in nested +objects and classes and in all other source files. 
Example: +```scala +// in test1.scala +opaque type A = String +val x: A = "abc" + +object obj: + val y: A = "abc" // error: found: "abc", required: A + +// in test2.scala +def z: String = x // error: found: A, required: String +``` +This behavior becomes clear if one recalls that top-level definitions are placed in their own synthetic object. For instance, the code in `test1.scala` would expand to +```scala +object test1$package: + opaque type A = String + val x: A = "abc" + +object obj: + val y: A = "abc" // error: cannot assign "abc" to opaque type alias A +``` +The opaque type alias `A` is transparent in its scope, which includes the definition of `x`, but not the definitions of `obj` and `y`. + + +## Relationship to SIP 35 + +Opaque types in Scala 3 are an evolution from what is described in +[Scala SIP 35](https://docs.scala-lang.org/sips/opaque-types.html). + +The differences compared to the state described in this SIP are: + + 1. Opaque type aliases cannot be defined anymore in local statement sequences. + 2. The scope where an opaque type alias is visible is now the whole scope where + it is defined, instead of just a companion object. + 3. The notion of a companion object for opaque type aliases has been dropped. + 4. Opaque type aliases can have bounds. + 5. The notion of type equality involving opaque type aliases has been clarified. It was + strengthened with respect to the previous implementation of SIP 35. diff --git a/docs/_spec/TODOreference/other-new-features/opaques.md b/docs/_spec/TODOreference/other-new-features/opaques.md new file mode 100644 index 000000000000..d8c4d37bcb3b --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/opaques.md @@ -0,0 +1,179 @@ +--- +layout: doc-page +title: "Opaque Type Aliases" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaques.html +--- + +Opaque types aliases provide type abstraction without any overhead. 
Example: + +```scala +object MyMath: + + opaque type Logarithm = Double + + object Logarithm: + + // These are the two ways to lift to the Logarithm type + + def apply(d: Double): Logarithm = math.log(d) + + def safe(d: Double): Option[Logarithm] = + if d > 0.0 then Some(math.log(d)) else None + + end Logarithm + + // Extension methods define opaque types' public APIs + extension (x: Logarithm) + def toDouble: Double = math.exp(x) + def + (y: Logarithm): Logarithm = Logarithm(math.exp(x) + math.exp(y)) + def * (y: Logarithm): Logarithm = x + y + +end MyMath +``` + +This introduces `Logarithm` as a new abstract type, which is implemented as `Double`. +The fact that `Logarithm` is the same as `Double` is only known in the scope where +`Logarithm` is defined, which in the above example corresponds to the object `MyMath`. +Or in other words, within the scope, it is treated as a type alias, but this is opaque to the outside world +where, in consequence, `Logarithm` is seen as an abstract type that has nothing to do with `Double`. + +The public API of `Logarithm` consists of the `apply` and `safe` methods defined in the companion object. +They convert from `Double`s to `Logarithm` values. Moreover, an operation `toDouble` that converts the other way, and operations `+` and `*` are defined as extension methods on `Logarithm` values. +The following operations would be valid because they use functionality implemented in the `MyMath` object. + +```scala +import MyMath.Logarithm + +val l = Logarithm(1.0) +val l2 = Logarithm(2.0) +val l3 = l * l2 +val l4 = l + l2 +``` + +But the following operations would lead to type errors: + +```scala +val d: Double = l // error: found: Logarithm, required: Double +val l2: Logarithm = 1.0 // error: found: Double, required: Logarithm +l * 2 // error: found: Int(2), required: Logarithm +l / l2 // error: `/` is not a member of Logarithm +``` + +## Bounds For Opaque Type Aliases + +Opaque type aliases can also come with bounds. 
Example: + +```scala +object Access: + + opaque type Permissions = Int + opaque type PermissionChoice = Int + opaque type Permission <: Permissions & PermissionChoice = Int + + extension (x: PermissionChoice) + def | (y: PermissionChoice): PermissionChoice = x | y + extension (x: Permissions) + def & (y: Permissions): Permissions = x | y + extension (granted: Permissions) + def is(required: Permissions) = (granted & required) == required + def isOneOf(required: PermissionChoice) = (granted & required) != 0 + + val NoPermission: Permission = 0 + val Read: Permission = 1 + val Write: Permission = 2 + val ReadWrite: Permissions = Read | Write + val ReadOrWrite: PermissionChoice = Read | Write + +end Access +``` + +The `Access` object defines three opaque type aliases: + +- `Permission`, representing a single permission, +- `Permissions`, representing a set of permissions with the meaning "all of these permissions granted", +- `PermissionChoice`, representing a set of permissions with the meaning "at least one of these permissions granted". + +Outside the `Access` object, values of type `Permissions` may be combined using the `&` operator, +where `x & y` means "all permissions in `x` *and* in `y` granted". +Values of type `PermissionChoice` may be combined using the `|` operator, +where `x | y` means "a permission in `x` *or* in `y` granted". + +Note that inside the `Access` object, the `&` and `|` operators always resolve to the corresponding methods of `Int`, +because members always take precedence over extension methods. +For that reason, the `|` extension method in `Access` does not cause infinite recursion. + +In particular, the definition of `ReadWrite` must use `|`, the bitwise operator for `Int`, +even though client code outside `Access` would use `&`, the extension method on `Permissions`. 
+The internal representations of `ReadWrite` and `ReadOrWrite` are identical, but this is not visible to the client, +which is interested only in the semantics of `Permissions`, as in the example below. + +All three opaque type aliases have the same underlying representation type `Int`. The +`Permission` type has an upper bound `Permissions & PermissionChoice`. This makes +it known outside the `Access` object that `Permission` is a subtype of the other +two types. Hence, the following usage scenario type-checks. + +```scala +object User: + import Access.* + + case class Item(rights: Permissions) + extension (item: Item) + def +(other: Item): Item = Item(item.rights & other.rights) + + val roItem = Item(Read) // OK, since Permission <: Permissions + val woItem = Item(Write) + val rwItem = Item(ReadWrite) + val noItem = Item(NoPermission) + + assert(!roItem.rights.is(ReadWrite)) + assert(roItem.rights.isOneOf(ReadOrWrite)) + + assert(rwItem.rights.is(ReadWrite)) + assert(rwItem.rights.isOneOf(ReadOrWrite)) + + assert(!noItem.rights.is(ReadWrite)) + assert(!noItem.rights.isOneOf(ReadOrWrite)) + + assert((roItem + woItem).rights.is(ReadWrite)) +end User +``` +On the other hand, the call `roItem.rights.isOneOf(ReadWrite)` would give a type error: +```scala + assert(roItem.rights.isOneOf(ReadWrite)) + ^^^^^^^^^ + Found: (Access.ReadWrite : Access.Permissions) + Required: Access.PermissionChoice +``` +`Permissions` and `PermissionChoice` are different, unrelated types outside `Access`. + + +## Opaque Type Members on Classes +While typically, opaque types are used together with objects to hide implementation details of a module, they can also be used with classes. + +For example, we can redefine the above example of Logarithms as a class. 
+```scala +class Logarithms: + + opaque type Logarithm = Double + + def apply(d: Double): Logarithm = math.log(d) + + def safe(d: Double): Option[Logarithm] = + if d > 0.0 then Some(math.log(d)) else None + + def mul(x: Logarithm, y: Logarithm) = x + y +``` + +Opaque type members of different instances are treated as different: +```scala +val l1 = new Logarithms +val l2 = new Logarithms +val x = l1(1.5) +val y = l1(2.6) +val z = l2(3.1) +l1.mul(x, y) // type checks +l1.mul(x, z) // error: found l2.Logarithm, required l1.Logarithm +``` +In general, one can think of an opaque type as being only transparent in the scope of `private[this]`. + +[More details](opaques-details.md) diff --git a/docs/_spec/TODOreference/other-new-features/open-classes.md b/docs/_spec/TODOreference/other-new-features/open-classes.md new file mode 100644 index 000000000000..764c234df599 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/open-classes.md @@ -0,0 +1,80 @@ +--- +layout: doc-page +title: "Open Classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/open-classes.html +--- + +An `open` modifier on a class signals that the class is planned for extensions. Example: +```scala +// File Writer.scala +package p + +open class Writer[T]: + + /** Sends to stdout, can be overridden */ + def send(x: T) = println(x) + + /** Sends all arguments using `send` */ + def sendAll(xs: T*) = xs.foreach(send) +end Writer + +// File EncryptedWriter.scala +package p + +class EncryptedWriter[T: Encryptable] extends Writer[T]: + override def send(x: T) = super.send(encrypt(x)) +``` +An open class typically comes with some documentation that describes +the internal calling patterns between methods of the class as well as hooks that can be overridden. We call this the _extension contract_ of the class. It is different from the _external contract_ between a class and its users. 
+ +Classes that are not open can still be extended, but only if at least one of two alternative conditions is met: + + - The extending class is in the same source file as the extended class. In this case, the extension is usually an internal implementation matter. + + - The language feature [`adhocExtensions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$adhocExtensions$.html) is enabled for the extending class. This is typically enabled by an import clause in the source file of the extension: + ```scala + import scala.language.adhocExtensions + ``` + Alternatively, the feature can be enabled by the compiler option `-language:adhocExtensions`. + If the feature is not enabled, the compiler will issue a "feature" warning. For instance, if the `open` modifier on class `Writer` is dropped, compiling `EncryptedWriter` would produce a warning: + ``` + -- Feature Warning: EncryptedWriter.scala:6:14 ---- + |class EncryptedWriter[T: Encryptable] extends Writer[T] + | ^ + |Unless class Writer is declared 'open', its extension + | in a separate file should be enabled + |by adding the import clause 'import scala.language.adhocExtensions' + |or by setting the compiler option -language:adhocExtensions. + ``` + +## Motivation + +When writing a class, there are three possible expectations of extensibility: + +1. The class is intended to allow extensions. This means one should expect +a carefully worked out and documented extension contract for the class. + +2. Extensions of the class are forbidden, for instance to make correctness or security guarantees. + +3. There is no firm decision either way. The class is not _a priori_ intended for extensions, but if others find it useful to extend on an _ad-hoc_ basis, let them go ahead. However, they are on their own in this case. There is no documented extension contract, and future versions of the class might break the extensions (by rearranging internal call patterns, for instance). 
+ +The three cases are clearly distinguished by using `open` for (1), `final` for (2) and no modifier for (3). + +It is good practice to avoid _ad-hoc_ extensions in a code base, since they tend to lead to fragile systems that are hard to evolve. But there +are still some situations where these extensions are useful: for instance, +to mock classes in tests, or to apply temporary patches that add features or fix bugs in library classes. That's why _ad-hoc_ extensions are permitted, but only if there is an explicit opt-in via a language feature import. + +## Details + + - `open` is a soft modifier. It is treated as a normal identifier + unless it is in modifier position. + - An `open` class cannot be `final` or `sealed`. + - Traits or `abstract` classes are always `open`, so `open` is redundant for them. + +## Relationship with `sealed` + +A class that is neither `abstract` nor `open` is similar to a `sealed` class: it can still be extended, but only in the same source file. The difference is what happens if an extension of the class is attempted in another source file. For a `sealed` class, this is an error, whereas for a simple non-open class, this is still permitted provided the [`adhocExtensions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$adhocExtensions$.html) feature is enabled, and it gives a warning otherwise. + +## Migration + +`open` is a new modifier in Scala 3. To allow cross compilation between Scala 2.13 and Scala 3.0 without warnings, the feature warning for ad-hoc extensions is produced only under `-source future`. It will be produced by default from Scala 3.1 on. 
diff --git a/docs/_spec/TODOreference/other-new-features/other-new-features.md b/docs/_spec/TODOreference/other-new-features/other-new-features.md new file mode 100644 index 000000000000..974a8548cb68 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/other-new-features.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Other New Features" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features.html +--- + +The following pages document new features of Scala 3. diff --git a/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md b/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md new file mode 100644 index 000000000000..e5165550fc0d --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md @@ -0,0 +1,89 @@ +--- +layout: doc-page +title: "Parameter Untupling - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling-spec.html +--- + +## Motivation + +Say you have a list of pairs + +```scala +val xs: List[(Int, Int)] +``` + +and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to their sum. +Previously, the best way to do this was with a pattern-matching decomposition: + +```scala +xs.map { + case (x, y) => x + y +} +``` +While correct, this is inconvenient. Instead, we propose to write it the following way: + +```scala +xs.map { + (x, y) => x + y +} +``` + +or, equivalently: + +```scala +xs.map(_ + _) +``` + +Generally, a function value with `n > 1` parameters can be converted to a function with tupled arguments if the expected type is a unary function type of the form `((T_1, ..., T_n)) => U`. + +## Type Checking + +The type checking happens in two steps: + +1. Check whether parameter untupling is feasible +2. 
Adapt the function and type check it + +### Feasibility Check + +Suppose a function `f` of the form `(p1, ..., pn) => e` (where `n > 1`), with `p1, ..., pn` as parameters and `e` as function body. + +If the expected type for checking `f` is a fully defined function type of the form `TupleN[T1, ..., Tn] => R` (or an equivalent SAM-type), where each type `Ti` fits the corresponding parameter `pi`. Then `f` is feasible for parameter untupling with the expected type `TupleN[T1, ..., Tn] => R`. + +A type `Ti` fits a parameter `pi` if one of the following two cases is `true`: + +* `pi` comes without a type, i.e. it is a simple identifier or `_`. +* `pi` is of the form `x: Ui` or `_: Ui` and `Ti <: Ui`. + +Parameter untupling composes with eta-expansion. That is, an n-ary function generated by eta-expansion can in turn be adapted to the expected type with parameter untupling. + +### Term adaptation + +If the function + +```scala +(p1, ..., pn) => e +``` + +is feasible for parameter untupling with the expected type `TupleN[T1, ..., Tn] => Te`, then continue to type check the following adapted function + +```scala +(x: TupleN[T1, ..., Tn]) => + def p1: T1 = x._1 + ... + def pn: Tn = x._n + e +``` + +with the same expected type. +## Migration + +Code like this could not be written before, hence the new notation is not ambiguous after adoption. + +It is possible that someone has written an implicit conversion from `(T1, ..., Tn) => R` to `TupleN[T1, ..., Tn] => R` for some `n`. +Such a conversion is now only useful for general conversions of function values, when parameter untupling is not applicable. +Some care is required to implement the conversion efficiently. +Obsolete conversions could be detected and fixed by [`Scalafix`](https://scalacenter.github.io/scalafix/). + +## Reference + +For more information, see [Issue #897](https://github.com/lampepfl/dotty/issues/897). 
diff --git a/docs/_spec/TODOreference/other-new-features/parameter-untupling.md b/docs/_spec/TODOreference/other-new-features/parameter-untupling.md new file mode 100644 index 000000000000..fcc1fa11d519 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/parameter-untupling.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: "Parameter Untupling" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling.html +--- + +Say you have a list of pairs + +```scala +val xs: List[(Int, Int)] +``` + +and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to +their sum. Previously, the best way to do this was with a pattern-matching decomposition: + +```scala +xs map { + case (x, y) => x + y +} +``` + +While correct, this is also inconvenient and confusing, since the `case` +suggests that the pattern match could fail. As a shorter and clearer alternative Scala 3 now allows + +```scala +xs.map { + (x, y) => x + y +} +``` + +or, equivalently: + +```scala +xs.map(_ + _) +``` +and +```scala +def combine(i: Int, j: Int) = i + j +xs.map(combine) +``` + +Generally, a function value with `n > 1` parameters is wrapped in a +function type of the form `((T_1, ..., T_n)) => U` if that is the expected type. +The tuple parameter is decomposed and its elements are passed directly to the underlying function. + +More specifically, the adaptation is applied to the mismatching formal +parameter list. In particular, the adaptation is not a conversion +between function types. 
That is why the following is not accepted: + +```scala +val combiner: (Int, Int) => Int = _ + _ +xs.map(combiner) // Type Mismatch +``` + +The function value must be explicitly tupled, rather than the parameters untupled: +```scala +xs.map(combiner.tupled) +``` + +A conversion may be provided in user code: + +```scala +import scala.language.implicitConversions +transparent inline implicit def `fallback untupling`(f: (Int, Int) => Int): ((Int, Int)) => Int = + p => f(p._1, p._2) // use specialized apply instead of unspecialized `tupled` +xs.map(combiner) +``` + +Parameter untupling is attempted before conversions are applied, so that a conversion in scope +cannot subvert untupling. + +## Reference + +For more information see: + +* [More details](./parameter-untupling-spec.md) +* [Issue #897](https://github.com/lampepfl/dotty/issues/897). diff --git a/docs/_spec/TODOreference/other-new-features/safe-initialization.md b/docs/_spec/TODOreference/other-new-features/safe-initialization.md new file mode 100644 index 000000000000..757038eac786 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/safe-initialization.md @@ -0,0 +1,343 @@ +--- +layout: doc-page +title: "Safe Initialization" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html +--- + +Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Ysafe-init`. + +The design and implementation of the initialization checker is described in the +paper _Safe object initialization, abstractly_ [3]. + +## A Quick Glance + +To get a feel of how it works, we first show several examples below. 
+ +### Parent-Child Interaction + +Given the following code snippet: + +``` scala +abstract class AbstractFile: + def name: String + val extension: String = name.substring(4) + +class RemoteFile(url: String) extends AbstractFile: + val localFile: String = s"${url.##}.tmp" // error: usage of `localFile` before it's initialized + def name: String = localFile +``` + +The checker will report: + +``` scala +-- Warning: tests/init/neg/AbstractFile.scala:7:4 ------------------------------ +7 | val localFile: String = s"${url.##}.tmp" // error: usage of `localFile` before it's initialized + | ^ + | Access non-initialized field value localFile. Calling trace: + | -> val extension: String = name.substring(4) [ AbstractFile.scala:3 ] + | -> def name: String = localFile [ AbstractFile.scala:8 ] +``` + +### Inner-Outer Interaction + +Given the code below: + +``` scala +object Trees: + class ValDef { counter += 1 } + class EmptyValDef extends ValDef + val theEmptyValDef = new EmptyValDef + private var counter = 0 // error +``` + +The checker will report: + +``` scala +-- Warning: tests/init/neg/trees.scala:5:14 ------------------------------------ +5 | private var counter = 0 // error + | ^ + | Access non-initialized field variable counter. Calling trace: + | -> val theEmptyValDef = new EmptyValDef [ trees.scala:4 ] + | -> class EmptyValDef extends ValDef [ trees.scala:3 ] + | -> class ValDef { counter += 1 } [ trees.scala:2 ] +``` + +### Functions + +Given the code below: + +``` scala +abstract class Parent: + val f: () => String = () => this.message + def message: String + +class Child extends Parent: + val a = f() + val b = "hello" // error + def message: String = b +``` + +The checker reports: + +``` scala +-- Warning: tests/init/neg/features-high-order.scala:7:6 ----------------------- +7 | val b = "hello" // error + | ^ + |Access non-initialized field value b. 
Calling trace: + | -> val a = f() [ features-high-order.scala:6 ] + | -> val f: () => String = () => this.message [ features-high-order.scala:2 ] + | -> def message: String = b [ features-high-order.scala:8 ] +``` +## Design Goals + +We establish the following design goals: + +- __Sound__: checking always terminates, and is sound for common and reasonable usage (over-approximation) +- __Expressive__: support common and reasonable initialization patterns +- __Friendly__: simple rules, minimal syntactic overhead, informative error messages +- __Modular__: modular checking, no analysis beyond project boundary +- __Fast__: instant feedback +- __Simple__: no changes to core type system, explainable by a simple theory + +By _reasonable usage_, we include the following use cases (but not restricted to them): + +- Access fields on `this` and outer `this` during initialization +- Call methods on `this` and outer `this` during initialization +- Instantiate inner class and call methods on such instances during initialization +- Capture fields in functions + +## Principles + +To achieve the goals, we uphold the following fundamental principles: +_stackability_, _monotonicity_, _scopability_ and _authority_. + +Stackability means that all fields of a class are initialized at the end of the +class body. Scala enforces this property in syntax by demanding that all fields +are initialized at the end of the primary constructor, except for the language +feature below: + +``` scala +var x: T = _ +``` + +Control effects such as exceptions may break this property, as the +following example shows: + +``` scala +class MyException(val b: B) extends Exception("") +class A: + val b = try { new B } catch { case myEx: MyException => myEx.b } + println(b.a) + +class B: + throw new MyException(this) + val a: Int = 1 +``` + +In the code above, the control effect teleport the uninitialized value +wrapped in an exception. 
In the implementation, we avoid the problem +by ensuring that the values that are thrown must be transitively initialized. + +Monotonicity means that the initialization status of an object should +not go backward: initialized fields continue to be initialized, a +field points to an initialized object may not later point to an +object under initialization. As an example, the following code will be rejected: + +``` scala +trait Reporter: + def report(msg: String): Unit + +class FileReporter(ctx: Context) extends Reporter: + ctx.typer.reporter = this // ctx now reaches an uninitialized object + val file: File = new File("report.txt") + def report(msg: String) = file.write(msg) +``` + +In the code above, suppose `ctx` points to a transitively initialized +object. Now the assignment at line 3 makes `this`, which is not fully +initialized, reachable from `ctx`. This makes field usage dangerous, +as it may indirectly reach uninitialized fields. + +Monotonicity is based on a well-known technique called _heap monotonic +typestate_ to ensure soundness in the presence of aliasing +[1]. Roughly speaking, it means initialization state should not go backwards. + +Scopability means that there are no side channels to access to partially +constructed objects. Control effects like coroutines, delimited +control, resumable exceptions may break the property, as they can transport a +value upper in the stack (not in scope) to be reachable from the current scope. +Static fields can also serve as a teleport thus breaks this property. In the +implementation, we need to enforce that teleported values are transitively +initialized. + +The three principles above contribute to _local reasoning about initialization_, +which means: + +> An initialized environment can only produce initialized values. + +For example, if the arguments to an `new`-expression are transitively +initialized, so is the result. 
If the receiver and arguments in a method call +are transitively initialized, so is the result. + +Local reasoning about initialization gives rise to a fast initialization +checker, as it avoids whole-program analysis. + +The principle of authority goes hand-in-hand with monotonicity: the principle +of monotonicity stipulates that initialization states cannot go backwards, while +the principle of authority stipulates that the initialization states may not +go forward at arbitrary locations due to aliasing. In Scala, we may only +advance initialization states of objects in the class body when a field is +defined with a mandatory initializer or at local reasoning points when the object +becomes transitively initialized. + +## Abstract Values + +There are three fundamental abstractions for initialization states of objects: + +- __Cold__: A cold object may have uninitialized fields. +- __Warm__: A warm object has all its fields initialized but may reach _cold_ objects. +- __Hot__: A hot object is transitively initialized, i.e., it only reaches warm objects. + +In the initialization checker, the abstraction `Warm` is refined to handle inner +classes and multiple constructors: + +- __Warm[C] { outer = V, ctor, args = Vs }__: A warm object of class `C`, where the immediate outer of `C` is `V`, the constructor is `ctor` and constructor arguments are `Vs`. + +The initialization checker checks each concrete class separately. The abstraction `ThisRef` +represents the current object under initialization: + +- __ThisRef[C]__: The current object of class `C` under initialization. + +The initialization state of the current object is stored in the abstract heap as an +abstract object. The abstract heap also serves as a cache for the field values +of warm objects. `Warm` and `ThisRef` are "addresses" of the abstract objects stored +in the abstract heap. 
+ +Two more abstractions are introduced to support functions and conditional +expressions: + +- __Fun(e, V, C)__: An abstract function value where `e` is the code, `V` is the + abstract value for `this` inside the function body and the function is located + inside the class `C`. + +- __Refset(Vs)__: A set of abstract values `Vs`. + +A value `v` is _effectively hot_ if any of the following is true: + +- `v` is `Hot`. +- `v` is `ThisRef` and all fields of the underlying object are assigned. +- `v` is `Warm[C] { ... }` and + 1. `C` does not contain inner classes; and + 2. Calling any method on `v` encounters no initialization errors and the method return value is _effectively hot_; and + 3. Each field of `v` is _effectively hot_. +- `v` is `Fun(e, V, C)` and calling the function encounters no errors and the + function return value is _effectively hot_. +- The root object (refered by `ThisRef`) is _effectively hot_. + +An effectively hot value can be regarded as transitively initialized thus can +be safely leaked via method arguments or as RHS of reassignment. +The initialization checker tries to promote non-hot values to effectively hot +whenenver possible. + +## Rules + +With the established principles and design goals, the following rules are imposed: + +1. The field access `e.f` or method call `e.m()` is illegal if `e` is _cold_. + + A cold value should not be used. + +2. The field access `e.f` is invalid if `e` has the value `ThisRef` and `f` is not initialized. + +3. In an assignment `o.x = e`, the expression `e` must be _effectively hot_. + + This is how monotonicity is enforced in the system. Note that in an + initialization `val f: T = e`, the expression `e` may point to a non-hot + value. + +4. Arguments to method calls must be _effectively hot_. + + Escape of `this` in the constructor is commonly regarded as an anti-pattern. + + However, passing non-hot values as argument to another constructor is allowed, to support + creation of cyclic data structures. 
The checker will ensure that the escaped + non-initialized object is not used, i.e. calling methods or accessing fields + on the escaped object is not allowed. + + An exception is for calling synthetic `apply`s of case classes. For example, + the method call `Some.apply(e)` will be interpreted as `new Some(e)`, thus + is valid even if `e` is not hot. + + Another exception to this rule is parametric method calls. For example, in + `List.apply(e)`, the argument `e` may be non-hot. If that is the case, the + result value of the parametric method call is taken as _cold_. + +5. Method calls on hot values with effectively hot arguments produce hot results. + + This rule is assured by local reasoning about initialization. + +6. Method calls on `ThisRef` and warm values will be resolved statically and the + corresponding method bodies are checked. + +7. In a new expression `new p.C(args)`, if the values of `p` and `args` are + effectively hot, then the result value is also hot. + + This is assured by local reasoning about initialization. + +8. In a new expression `new p.C(args)`, if any value of `p` and `args` is not + effectively hot, then the result value takes the form `Warm[C] { outer = Vp, args = Vargs }`. The initialization code for the class `C` is checked again to make + sure the non-hot values are used properly. + + In the above, `Vp` is the widened value of `p` --- the widening happens if `p` + is a warm value `Warm[D] { outer = V, args }` and we widen it to + `Warm[D] { outer = Cold, args }`. + + The variable `Vargs` represents values of `args` with non-hot values widened + to `Cold`. + + The motivation for the widening is to finitize the abstract domain and ensure + termination of the initialization check. + +9. The scrutinee in a pattern match and the values in return and throw statements must be _effectively hot_. + +## Modularity + +The analysis takes the primary constructor of concrete classes as entry points. 
+It follows the constructors of super classes, which might be defined in another project. +The analysis takes advantage of TASTy for analyzing super classes defined in another project. + +The crossing of project boundary raises a concern about modularity. It is +well-known in object-oriented programming that superclass and subclass are +tightly coupled. For example, adding a method in the superclass requires +recompiling the child class for checking safe overriding. + +Initialization is no exception in this respect. The initialization of an object +essentially involves close interaction between subclass and superclass. If the +superclass is defined in another project, the crossing of project boundary +cannot be avoided for soundness of the analysis. + +Meanwhile, inheritance across project boundary has been under scrutiny and the +introduction of [open classes](./open-classes.md) mitigate the concern here. +For example, the initialization check could enforce that the constructors of +open classes may not contain method calls on `this` or introduce annotations as +a contract. + +The feedback from the community on the topic is welcome. + +## Back Doors + +Occasionally you may want to suppress warnings reported by the +checker. You can either write `e: @unchecked` to tell the checker to +skip checking for the expression `e`, or you may use the old trick: +mark some fields as lazy. + +## Caveats + +- The system cannot provide safety guarantee when extending Java or Scala 2 classes. +- Safe initialization of global objects is only partially checked. + +## References + +1. Fähndrich, M. and Leino, K.R.M., 2003, July. [_Heap monotonic typestates_](https://www.microsoft.com/en-us/research/publication/heap-monotonic-typestate/). In International Workshop on Aliasing, Confinement and Ownership in object-oriented programming (IWACO). +2. Fengyun Liu, Ondřej Lhoták, Aggelos Biboudis, Paolo G. Giarrusso, and Martin Odersky. 
[_A type-and-effect system for object initialization_](https://dl.acm.org/doi/10.1145/3428243). OOPSLA, 2020. +3. Fengyun Liu, Ondřej Lhoták, Enze Xing, Nguyen Cao Pham. [_Safe object initialization, abstractly_](https://dl.acm.org/doi/10.1145/3486610.3486895). Scala 2021. diff --git a/docs/_spec/TODOreference/other-new-features/targetName.md b/docs/_spec/TODOreference/other-new-features/targetName.md new file mode 100644 index 000000000000..63c4cf1ec0df --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/targetName.md @@ -0,0 +1,118 @@ +--- +layout: doc-page +title: "The @targetName annotation" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/targetName.html +--- + +A [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation on a definition defines an alternate name for the implementation of that definition. Example: + +```scala +import scala.annotation.targetName + +object VecOps: + extension [T](xs: Vec[T]) + @targetName("append") + def ++= [T] (ys: Vec[T]): Vec[T] = ... +``` + +Here, the `++=` operation is implemented (in Byte code or native code) under the name `append`. The implementation name affects the code that is generated, and is the name under which code from other languages can call the method. For instance, `++=` could be invoked from Java like this: + +```java +VecOps.append(vec1, vec2) +``` + +The [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation has no bearing on Scala usages. Any application of that method in Scala has to use `++=`, not `append`. + +## Details + + 1. `@targetName` is defined in package `scala.annotation`. It takes a single argument + of type `String`. That string is called the _external name_ of the definition + that's annotated. + + 2. A `@targetName` annotation can be given for all kinds of definitions except a top-level `class`, `trait`, or `object`. + + 3. 
This means that `@targetName` annotations can be used to disambiguate two method definitions that would otherwise clash. For instance:
But we can avoid the clash by adding a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation to either method or to both of them. Example: + +```scala +@targetName("f_string") +def f(x: => String): Int = x.length +def f(x: => Int): Int = x + 1 // OK +``` + +This will produce methods `f_string` and `f` in the generated code. + +However, [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotations are not allowed to break overriding relationships +between two definitions that have otherwise the same names and types. So the following would be in error: + +```scala +import annotation.targetName +class A: + def f(): Int = 1 +class B extends A: + @targetName("g") def f(): Int = 2 +``` + +The compiler reports here: + +``` +-- Error: test.scala:6:23 ------------------------------------------------------ +6 | @targetName("g") def f(): Int = 2 + | ^ + |error overriding method f in class A of type (): Int; + | method f of type (): Int should not have a @targetName + | annotation since the overridden member hasn't one either +``` + +The relevant overriding rules can be summarized as follows: + +- Two members can override each other if their names and signatures are the same, + and they either have the same erased names or the same types. +- If two members override, then both their erased names and their types must be the same. + +As usual, any overriding relationship in the generated code must also +be present in the original code. So the following example would also be in error: + +```scala +import annotation.targetName +class A: + def f(): Int = 1 +class B extends A: + @targetName("f") def g(): Int = 2 +``` + +Here, the original methods `g` and `f` do not override each other since they have +different names. 
But once we switch to target names, there is a clash that is reported by the compiler: + +``` +-- [E120] Naming Error: test.scala:4:6 ----------------------------------------- +4 |class B extends A: + | ^ + | Name clash between defined and inherited member: + | def f(): Int in class A at line 3 and + | def g(): Int in class B at line 5 + | have the same name and type after erasure. +1 error found +``` diff --git a/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md b/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md new file mode 100644 index 000000000000..ae1af1e4b671 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md @@ -0,0 +1,18 @@ +--- +layout: doc-page +title: "The @threadUnsafe annotation" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/threadUnsafe-annotation.html +--- + +A new annotation [`@threadUnsafe`](https://scala-lang.org/api/3.x/scala/annotation/threadUnsafe.html) can be used on a field which defines +a `lazy val`. When this annotation is used, the initialization of the +[`lazy val`](../changed-features/lazy-vals-init.md) will use a faster mechanism which is not thread-safe. + +## Example + +```scala +import scala.annotation.threadUnsafe + +class Hello: + @threadUnsafe lazy val x: Int = 1 +``` diff --git a/docs/_spec/TODOreference/other-new-features/trait-parameters.md b/docs/_spec/TODOreference/other-new-features/trait-parameters.md new file mode 100644 index 000000000000..c704e73ce9b8 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/trait-parameters.md @@ -0,0 +1,88 @@ +--- +layout: doc-page +title: "Trait Parameters" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/trait-parameters.html +--- + +Scala 3 allows traits to have parameters, just like classes have parameters. 
Should this print "Bob" or "Bill"? In fact this program is illegal,
because it violates the second of the following rules for trait parameters:
For instance, +here's a variant of greetings where the addressee is a context parameter of type +`ImpliedName`: + +```scala +case class ImpliedName(name: String): + override def toString = name + +trait ImpliedGreeting(using val iname: ImpliedName): + def msg = s"How are you, $iname" + +trait ImpliedFormalGreeting extends ImpliedGreeting: + override def msg = s"How do you do, $iname" + +class F(using iname: ImpliedName) extends ImpliedFormalGreeting +``` + +The definition of `F` in the last line is implicitly expanded to +```scala +class F(using iname: ImpliedName) extends + Object, + ImpliedGreeting(using iname), + ImpliedFormalGreeting(using iname) +``` +Note the inserted reference to the super trait `ImpliedGreeting`, which was not mentioned explicitly. + +## Reference + +For more information, see [Scala SIP 25](http://docs.scala-lang.org/sips/pending/trait-parameters.html). diff --git a/docs/_spec/TODOreference/other-new-features/transparent-traits.md b/docs/_spec/TODOreference/other-new-features/transparent-traits.md new file mode 100644 index 000000000000..699ce0b9ddd8 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/transparent-traits.md @@ -0,0 +1,70 @@ +--- +layout: doc-page +title: "Transparent Traits" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/transparent-traits.html +--- + +Traits are used in two roles: + + 1. As mixins for other classes and traits + 2. As types of vals, defs, or parameters + +Some traits are used primarily in the first role, and we usually do not want to see them in inferred types. An example is the [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) trait that the compiler adds as a mixin trait to every case class or case object. In Scala 2, this parent trait sometimes makes inferred types more complicated than they should be. 
Example: + +```scala +trait Kind +case object Var extends Kind +case object Val extends Kind +val x = Set(if condition then Val else Var) +``` + +Here, the inferred type of `x` is `Set[Kind & Product & Serializable]` whereas one would have hoped it to be `Set[Kind]`. The reasoning for this particular type to be inferred is as follows: + +- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. +- A union type is widened in type inference to the least supertype that is not a union type. + In the example, this type is `Kind & Product & Serializable` since all three traits are traits of both `Val` and `Var`. + So that type becomes the inferred element type of the set. + +Scala 3 allows one to mark a mixin trait as `transparent`, which means that it can be suppressed in type inference. Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`: + +```scala +transparent trait S +trait Kind +object Var extends Kind, S +object Val extends Kind, S +val x = Set(if condition then Val else Var) +``` + +Now `x` has inferred type `Set[Kind]`. The common transparent trait `S` does not +appear in the inferred type. + +## Transparent Traits + +The traits [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html), [`java.io.Serializable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/Serializable.html) and [`java.lang.Comparable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Comparable.html) +are treated automatically as transparent. Other traits are turned into transparent traits using the modifier `transparent`. Scala 2 traits can also be made transparent +by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). 
It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed. + +Typically, transparent traits are traits +that influence the implementation of inheriting classes and traits that are not usually used as types by themselves. Two examples from the standard collection library are: + +- [`IterableOps`](https://scala-lang.org/api/3.x/scala/collection/IterableOps.html), which provides method implementations for an [`Iterable`](https://scala-lang.org/api/3.x/scala/collection/Iterable.html). +- [`StrictOptimizedSeqOps`](https://scala-lang.org/api/3.x/scala/collection/StrictOptimizedSeqOps.html), which optimises some of these implementations for sequences with efficient indexing. + +Generally, any trait that is extended recursively is a good candidate to be +declared transparent. + +## Rules for Inference + +Transparent traits can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference say that transparent traits are dropped from intersections where possible. + +The precise rules are as follows: + +- When inferring a type of a type variable, or the type of a val, or the return type of a def, +- where that type is not higher-kinded, +- and where `B` is its known upper bound or `Any` if none exists: +- If the type inferred so far is of the form `T1 & ... & Tn` where + `n >= 1`, replace the maximal number of transparent `Ti`s by `Any`, while ensuring that + the resulting type is still a subtype of the bound `B`. +- However, do not perform this widening if all transparent traits `Ti` can get replaced in that way. + +The last clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. 
diff --git a/docs/_spec/TODOreference/other-new-features/type-test.md b/docs/_spec/TODOreference/other-new-features/type-test.md new file mode 100644 index 000000000000..ec7a87230753 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/type-test.md @@ -0,0 +1,181 @@ +--- +layout: doc-page +title: "TypeTest" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/type-test.html +--- + +## TypeTest + +When pattern matching there are two situations where a runtime type test must be performed. +The first case is an explicit type test using the ascription pattern notation. + +```scala +(x: X) match + case y: Y => +``` + +The second case is when an extractor takes an argument that is not a subtype of the scrutinee type. + +```scala +(x: X) match + case y @ Y(n) => + +object Y: + def unapply(x: Y): Some[Int] = ... +``` + +In both cases, a class test will be performed at runtime. +But when the type test is on an abstract type (type parameter or type member), the test cannot be performed because the type is erased at runtime. + +A [`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) can be provided to make this test possible. + +```scala +package scala.reflect + +trait TypeTest[-S, T]: + def unapply(s: S): Option[s.type & T] +``` + +It provides an extractor that returns its argument typed as a `T` if the argument is a `T`. +It can be used to encode a type test. + +```scala +def f[X, Y](x: X)(using tt: TypeTest[X, Y]): Option[Y] = x match + case tt(x @ Y(1)) => Some(x) + case tt(x) => Some(x) + case _ => None +``` + +To avoid the syntactic overhead the compiler will look for a type test automatically if it detects that the type test is on abstract types. +This means that `x: Y` is transformed to `tt(x)` and `x @ Y(_)` to `tt(x @ Y(_))` if there is a contextual `TypeTest[X, Y]` in scope. 
We can create a type test at the call site, where the type test can be performed with runtime class tests directly, as follows:
  given typeTestOfZero: TypeTest[Nat, Zero] = new:
    def unapply(x: Nat): Option[x.type & Zero] =
      if x == 0 then Some(x) else None

  given typeTestOfSucc: TypeTest[Nat, Succ] = new:
    def unapply(x: Nat): Option[x.type & Succ] =
      if x > 0 then Some(x) else None
diff --git a/docs/_spec/TODOreference/overview.md b/docs/_spec/TODOreference/overview.md new file mode 100644 index 000000000000..b1e8281dfc16 --- /dev/null +++ b/docs/_spec/TODOreference/overview.md @@ -0,0 +1,155 @@ +--- +layout: doc-page +title: "Reference" +nightlyOf: https://docs.scala-lang.org/scala3/reference/overview.html +redirectFrom: overview.html +--- + +Scala 3 implements many language changes and improvements over Scala 2. +In this reference, we discuss design decisions and present important differences compared to Scala 2. + +## Goals + +The language redesign was guided by three main goals: + +- Strengthen Scala's foundations. + Make the full programming language compatible with the foundational work on the + [DOT calculus](https://infoscience.epfl.ch/record/227176/files/soundness_oopsla16.pdf) + and apply the lessons learned from that work. +- Make Scala easier and safer to use. + Tame powerful constructs such as implicits to provide a gentler learning curve. Remove warts and puzzlers. +- Further improve the consistency and expressiveness of Scala's language constructs. + +Corresponding to these goals, the language changes fall into seven categories: +(1) Core constructs to strengthen foundations, (2) simplifications and (3) [restrictions](#restrictions), to make the language easier and safer to use, (4) [dropped constructs](#dropped-constructs) to make the language smaller and more regular, (5) [changed constructs](#changes) to remove warts, and increase consistency and usability, (6) [new constructs](#new-constructs) to fill gaps and increase expressiveness, (7) a new, principled approach to metaprogramming that replaces [Scala 2 experimental macros](https://docs.scala-lang.org/overviews/macros/overview.html). + +## Essential Foundations + +These new constructs directly model core features of DOT, higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf). 
+ +- [Intersection types](new-types/intersection-types.md), replacing compound types, +- [Union types](new-types/union-types.md), +- [Type lambdas](new-types/type-lambdas.md), replacing encodings using structural types and type projection. +- [Context functions](contextual/context-functions.md), offering abstraction over given parameters. + +## Simplifications + +These constructs replace existing constructs with the aim of making the language safer and simpler to use, and to promote uniformity in code style. + +- [Trait parameters](other-new-features/trait-parameters.md) + replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct. +- [Given instances](contextual/givens.md) + replace implicit objects and defs, focussing on intent over mechanism. +- [Using clauses](contextual/using-clauses.md) + replace implicit parameters, avoiding their ambiguities. +- [Extension methods](contextual/extension-methods.md) + replace implicit classes with a clearer and simpler mechanism. +- [Opaque type aliases](other-new-features/opaques.md) + replace most uses of value classes while guaranteeing the absence of boxing. +- [Top-level definitions](dropped-features/package-objects.md) + replace package objects, dropping syntactic boilerplate. +- [Export clauses](other-new-features/export.md) + provide a simple and general way to express aggregation, which can replace + the previous facade pattern of package objects inheriting from classes. +- [Vararg splices](changed-features/vararg-splices.md) + now use the form `xs*` in function arguments and patterns instead of `xs: _*` and `xs @ _*`, +- [Universal apply methods](other-new-features/creator-applications.md) + allow using simple function call syntax instead of `new` expressions. `new` expressions stay around + as a fallback for the cases where creator applications cannot be used. 
+ +With the exception of [early initializers](dropped-features/early-initializers.md) and old-style vararg patterns, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later. + +Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might bring them back in a more general form if they are supported natively by the JVM as is planned by [project Valhalla](https://openjdk.java.net/projects/valhalla/). + +## Restrictions + +These constructs are restricted to make the language safer. + +- [Implicit Conversions](contextual/conversions.md): + there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import. +- [Given Imports](contextual/given-imports.md): + implicits now require a special form of import, to make the import clearly visible. +- [Type Projection](dropped-features/type-projection.md): + only classes can be used as prefix `C` of a type projection `C#A`. Type projection on abstract types is no longer supported since it is unsound. +- [Multiversal Equality](contextual/multiversal-equality.md): + implement an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`. +- [infix](changed-features/operators.md): + make method application syntax uniform across code bases. + +Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`. + +## Dropped Constructs + +These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation. 
+ +- [DelayedInit](dropped-features/delayed-init.md), +- [Existential types](dropped-features/existential-types.md), +- [Procedure syntax](dropped-features/procedure-syntax.md), +- [Class shadowing](dropped-features/class-shadowing.md), +- [XML literals](dropped-features/xml.md), +- [Symbol literals](dropped-features/symlits.md), +- [Auto application](dropped-features/auto-apply.md), +- [Weak conformance](dropped-features/weak-conformance.md), +- Compound types (replaced by [Intersection types](new-types/intersection-types.md)), +- [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged). + +The date when these constructs are dropped varies. The current status is: + +- Not implemented at all: + - DelayedInit, existential types, weak conformance. +- Supported under `-source 3.0-migration`: + - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form. +- Supported in 3.0, to be deprecated and phased out later: + - [XML literals](dropped-features/xml.md), compound types. + +## Changes + +These constructs have undergone changes to make them more regular and useful. + +- [Structural Types](changed-features/structural-types.md): + They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo. +- [Name-based pattern matching](changed-features/pattern-matching.md): + The existing undocumented Scala 2 implementation has been codified in a slightly simplified form. +- [Automatic Eta expansion](changed-features/eta-expansion.md): + Eta expansion is now performed universally also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0. +- [Implicit Resolution](changed-features/implicit-resolution.md): + The implicit resolution rules have been cleaned up to make them more useful and less surprising. 
Implicit scope is restricted to no longer include package prefixes. + +Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally. + +## New Constructs + +These are additions to the language that make it more powerful or pleasant to use. + +- [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md). +- [Parameter untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring. +- [Dependent function types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types. +- [Polymorphic function types](new-types/polymorphic-function-types.md) generalize polymorphic methods to polymorphic function values and types. + _Current status_: There is a proposal and a merged prototype implementation, but the implementation has not been finalized (it is notably missing type inference support). +- [Kind polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors. +- [`@targetName` annotations](other-new-features/targetName.md) make it easier to interoperate with code written in other languages and give more flexibility for avoiding name clashes. + +## Metaprogramming + +The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming was achieved by a combination of macros and libraries such as [Shapeless](https://github.com/milessabin/shapeless) that were in turn based on some key macros. Current Scala 2 macro mechanisms are a thin veneer on top of the current Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3. 
+ +It's worth noting that macros were never included in the [Scala 2 language specification](https://scala-lang.org/files/archive/spec/2.13/) and were so far made available only under an `-experimental` flag. This has not prevented their widespread usage. + +To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes until the final release. Stabilizing the feature set needed for metaprogramming is our first priority. + +- [Match Types](new-types/match-types.md) + allow computation on types. +- [Inline](metaprogramming/inline.md) + provides by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros. +- [Quotes and Splices](metaprogramming/macros.md) + provide a principled way to express macros and staging with a unified set of abstractions. +- [Type class derivation](contextual/derivation.md) + provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, efficient and easier to use than the macro. +- [By-name context parameters](contextual/by-name-context-parameters.md) + provide a more robust in-language implementation of the `Lazy` macro in [Shapeless](https://github.com/milessabin/shapeless). + +## See Also + +[A classification of proposed language features](./features-classification.md) is +an expanded version of this page that adds the status (i.e. relative importance to be a part of Scala 3, and relative urgency when to decide this) and expected migration cost +of each language construct. 
A soft modifier is treated as a potential modifier of a definition if it is followed by a hard modifier or a keyword combination starting a definition (`def`, `val`, `var`, `type`, `given`, `class`, `trait`, `object`, `enum`, `case class`, `case object`). Between the two words there may be a sequence of newline tokens and soft modifiers.
diff --git a/docs/_spec/TODOreference/syntax.md b/docs/_spec/TODOreference/syntax.md new file mode 100644 index 000000000000..d3526783a5eb --- /dev/null +++ b/docs/_spec/TODOreference/syntax.md @@ -0,0 +1,472 @@ +--- +layout: doc-page +title: "Scala 3 Syntax Summary" +nightlyOf: https://docs.scala-lang.org/scala3/reference/syntax.html +--- + + + +The following description of Scala tokens uses literal characters `‘c’` when +referring to the ASCII fragment `\u0000` – `\u007F`. + +_Unicode escapes_ are used to represent the [Unicode character](https://www.w3.org/International/articles/definitions-characters/) with the given +hexadecimal code: + +``` +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’ +``` + +Informal descriptions are typeset as `“some comment”`. + +## Lexical Syntax + +The lexical syntax of Scala is given by the following grammar in EBNF +form. + +``` +whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ +upper ::= ‘A’ | ... | ‘Z’ | ‘\$’ | ‘_’ “... and Unicode category Lu” +lower ::= ‘a’ | ... | ‘z’ “... and Unicode category Ll” +letter ::= upper | lower “... and Unicode categories Lo, Lt, Nl” +digit ::= ‘0’ | ... | ‘9’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ +delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ +opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | + ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ + “... 
and Unicode categories Sm, So” +printableChar ::= “all characters in [\u0020, \u007E] inclusive” +charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) + +op ::= opchar {opchar} +varid ::= lower idrest +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] +quoteId ::= ‘'’ alphaid +spliceId ::= ‘$’ alphaid ; + +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | nonZeroDigit [{digit | ‘_’} digit] +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +nonZeroDigit ::= ‘1’ | ... | ‘9’ + +floatingPointLiteral + ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] + | decimalNumeral exponentPart [floatType] + | decimalNumeral floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ + +booleanLiteral ::= ‘true’ | ‘false’ + +characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ + +stringLiteral ::= ‘"’ {stringElement} ‘"’ + | ‘"""’ multiLineChars ‘"""’ +stringElement ::= printableChar \ (‘"’ | ‘\’) + | UnicodeEscape + | charEscapeSeq +multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} +processedStringLiteral + ::= alphaid ‘"’ {[‘\’] processedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +processedStringPart + ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘$$’ + | ‘$’ letter { letter | digit } + | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ +stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} + +symbolLiteral ::= ‘'’ plainid // until 2.13 + +comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ + | ‘//’ “any sequence of characters up to end of line” + +nl ::= “new line character” +semi ::= ‘;’ | nl {nl} +``` + +## Optional Braces + +The lexical analyzer also 
inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). + +In the context-free productions below we use the notation `<<< ts >>>` +to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. Analogously, the +notation `:<<< ts >>>` indicates a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent` that follows +a `colon` token. + +A `colon` token reads as the standard colon "`:`" but is generated instead of it where `colon` is legal according to the context free syntax, but only if the previous token +is an alphanumeric identifier, a backticked identifier, or one of the tokens `this`, `super`, `new`, "`)`", and "`]`". + +``` +colon ::= ':' -- with side conditions explained above + <<< ts >>> ::= ‘{’ ts ‘}’ + | indent ts outdent +:<<< ts >>> ::= [nl] ‘{’ ts ‘}’ + | colon indent ts outdent +``` + +## Keywords + +### Regular keywords + +``` +abstract case catch class def do else +enum export extends false final finally for +given if implicit import lazy match new +null object override package private protected return +sealed super then throw trait true try +type val var while with yield +: = <- => <: >: # +@ =>> ?=> +``` + +### Soft keywords + +``` +as derives end extension infix inline opaque open transparent using | * + - +``` + +See the [separate section on soft keywords](./soft-modifier.md) for additional +details on where a soft keyword is recognized. 
+ +## Context-free Syntax + +The context-free syntax of Scala is given by the following EBNF +grammar: + +### Literals and Paths +``` +SimpleLiteral ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral +Literal ::= SimpleLiteral + | processedStringLiteral + | symbolLiteral + | ‘null’ + +QualId ::= id {‘.’ id} +ids ::= id {‘,’ id} + +SimpleRef ::= id + | [id ‘.’] ‘this’ + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id + +ClassQualifier ::= ‘[’ id ‘]’ +``` + +### Types +``` +Type ::= FunType + | HkTypeParamClause ‘=>>’ Type + | FunParamClause ‘=>>’ Type + | MatchType + | InfixType +FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type + | HKTypeParamClause '=>' Type +FunTypeArgs ::= InfixType + | ‘(’ [ FunArgTypes ] ‘)’ + | FunParamClause +FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +MatchType ::= InfixType `match` <<< TypeCaseClauses >>> +InfixType ::= RefinedType {id [nl] RefinedType} +RefinedType ::= AnnotType {[nl] Refinement} +AnnotType ::= SimpleType {Annotation} + +SimpleType ::= SimpleLiteral + | ‘?’ TypeBounds + | id + | Singleton ‘.’ id + | Singleton ‘.’ ‘type’ + | ‘(’ Types ‘)’ + | Refinement + | SimpleType1 TypeArgs + | SimpleType1 ‘#’ id +Singleton ::= SimpleRef + | SimpleLiteral + | Singleton ‘.’ id + +FunArgType ::= Type + | ‘=>’ Type +FunArgTypes ::= FunArgType { ‘,’ FunArgType } +ParamType ::= [‘=>’] ParamValueType +ParamValueType ::= Type [‘*’] +TypeArgs ::= ‘[’ Types ‘]’ +Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> +TypeBounds ::= [‘>:’ Type] [‘<:’ Type] +TypeParamBounds ::= TypeBounds {‘:’ Type} +Types ::= Type {‘,’ Type} +``` + +### Expressions +``` +Expr ::= FunParams (‘=>’ | ‘?=>’) Expr + | HkTypeParamClause ‘=>’ Expr + | Expr1 +BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block + | HkTypeParamClause ‘=>’ Block + | Expr1 +FunParams ::= Bindings + | id + | ‘_’ +Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] + | [‘inline’] 
‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] + | ‘while’ ‘(’ Expr ‘)’ {nl} Expr + | ‘while’ Expr ‘do’ Expr + | ‘try’ Expr Catches [‘finally’ Expr] + | ‘try’ Expr [‘finally’ Expr] + | ‘throw’ Expr + | ‘return’ [Expr] + | ForExpr + | [SimpleExpr ‘.’] id ‘=’ Expr + | PrefixOperator SimpleExpr ‘=’ Expr + | SimpleExpr ArgumentExprs ‘=’ Expr + | PostfixExpr [Ascription] + | ‘inline’ InfixExpr MatchClause +Ascription ::= ‘:’ InfixType + | ‘:’ Annotation {Annotation} +Catches ::= ‘catch’ (Expr | ExprCaseClause) +PostfixExpr ::= InfixExpr [id] -- only if language.postfixOperators is enabled +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr + | InfixExpr MatchClause +MatchClause ::= ‘match’ <<< CaseClauses >>> +PrefixExpr ::= [PrefixOperator] SimpleExpr +PrefixOperator ::= ‘-’ | ‘+’ | ‘~’ | ‘!’ -- unless backquoted +SimpleExpr ::= SimpleRef + | Literal + | ‘_’ + | BlockExpr + | ExprSplice + | Quoted + | quoteId -- only inside splices + | ‘new’ ConstrApp {‘with’ ConstrApp} [TemplateBody] + | ‘new’ TemplateBody + | ‘(’ ExprsInParens ‘)’ + | SimpleExpr ‘.’ id + | SimpleExpr ‘.’ MatchClause + | SimpleExpr TypeArgs + | SimpleExpr ArgumentExprs +Quoted ::= ‘'’ ‘{’ Block ‘}’ + | ‘'’ ‘[’ Type ‘]’ +ExprSplice ::= spliceId -- if inside quoted block + | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern + | ‘$’ ‘{’ Pattern ‘}’ -- when inside quoted pattern +ExprsInParens ::= ExprInParens {‘,’ ExprInParens} +ExprInParens ::= PostfixExpr ‘:’ Type + | Expr +ParArgumentExprs ::= ‘(’ [ExprsInParens] ‘)’ + | ‘(’ ‘using’ ExprsInParens ‘)’ + | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ +ArgumentExprs ::= ParArgumentExprs + | BlockExpr +BlockExpr ::= <<< (CaseClauses | Block) >>> +Block ::= {BlockStat semi} [BlockResult] +BlockStat ::= Import + | {Annotation {nl}} {LocalModifier} Def + | Extension + | Expr1 + | EndMarker + +ForExpr ::= ‘for’ ‘(’ Enumerators0 ‘)’ {nl} [‘do‘ | ‘yield’] Expr + | ‘for’ ‘{’ Enumerators0 ‘}’ {nl} [‘do‘ | ‘yield’] Expr + | ‘for’ Enumerators0 (‘do‘ | ‘yield’) 
Expr +Enumerators0 ::= {nl} Enumerators [semi] +Enumerators ::= Generator {semi Enumerator | Guard} +Enumerator ::= Generator + | Guard {Guard} + | Pattern1 ‘=’ Expr +Generator ::= [‘case’] Pattern1 ‘<-’ Expr +Guard ::= ‘if’ PostfixExpr + +CaseClauses ::= CaseClause { CaseClause } +CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block +ExprCaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Expr +TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } +TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] + +Pattern ::= Pattern1 { ‘|’ Pattern1 } +Pattern1 ::= Pattern2 [‘:’ RefinedType] +Pattern2 ::= [id ‘@’] InfixPattern [‘*’] +InfixPattern ::= SimplePattern { id [nl] SimplePattern } +SimplePattern ::= PatVar + | Literal + | ‘(’ [Patterns] ‘)’ + | Quoted + | SimplePattern1 [TypeArgs] [ArgumentPatterns] + | ‘given’ RefinedType +SimplePattern1 ::= SimpleRef + | SimplePattern1 ‘.’ id +PatVar ::= varid + | ‘_’ +Patterns ::= Pattern {‘,’ Pattern} +ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ + | ‘(’ [Patterns ‘,’] PatVar ‘*’ ‘)’ +``` + +### Type and Value Parameters +``` +ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ +ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeParamBounds + +DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds + +TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ +TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds + +HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ +HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypeParamClause] | ‘_’) TypeBounds + +ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] +ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ + | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ +ClsParams ::= ClsParam {‘,’ ClsParam} +ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] + +DefParamClauses ::= {DefParamClause} [[nl] ‘(’ 
[‘implicit’] DefParams ‘)’] +DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’ +DefParams ::= DefParam {‘,’ DefParam} +DefParam ::= {Annotation} [‘inline’] Param +``` + +### Bindings and Imports +``` +Bindings ::= ‘(’ [Binding {‘,’ Binding}] ‘)’ +Binding ::= (id | ‘_’) [‘:’ Type] + +Modifier ::= LocalModifier + | AccessModifier + | ‘override’ + | ‘opaque’ +LocalModifier ::= ‘abstract’ + | ‘final’ + | ‘sealed’ + | ‘open’ + | ‘implicit’ + | ‘lazy’ + | ‘inline’ +AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] +AccessQualifier ::= ‘[’ id ‘]’ + +Annotation ::= ‘@’ SimpleType1 {ParArgumentExprs} + +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec + | SimpleRef ‘as’ id +ImportSpec ::= NamedSelector + | WildCardSelector + | ‘{’ ImportSelectors ‘}’ +NamedSelector ::= id [‘as’ (id | ‘_’)] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] +ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} + +EndMarker ::= ‘end’ EndMarkerTag -- when followed by EOL +EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’ + | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’ +``` + +### Declarations and Definitions +``` +RefineDcl ::= ‘val’ ValDcl + | ‘def’ DefDcl + | ‘type’ {nl} TypeDcl +Dcl ::= RefineDcl + | ‘var’ VarDcl +ValDcl ::= ids ‘:’ Type +VarDcl ::= ids ‘:’ Type +DefDcl ::= DefSig ‘:’ Type +DefSig ::= id [DefTypeParamClause] DefParamClauses +TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] + +Def ::= ‘val’ PatDef + | ‘var’ PatDef + | ‘def’ DefDef + | ‘type’ {nl} TypeDcl + | TmplDef +PatDef ::= ids [‘:’ Type] ‘=’ Expr + | Pattern2 [‘:’ Type] ‘=’ Expr +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr + | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr + +TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef + | [‘case’] ‘object’ 
ObjectDef + | ‘enum’ EnumDef + | ‘given’ GivenDef +ClassDef ::= id ClassConstr [Template] +ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses +ConstrMods ::= {Annotation} [AccessModifier] +ObjectDef ::= id [Template] +EnumDef ::= id ClassConstr InheritClauses EnumBody +GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefParamClause`, `UsingParamClause` must be present +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] +Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} + ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods +ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> +ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef + | Export +Template ::= InheritClauses [TemplateBody] +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp}) +ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} +ConstrExpr ::= SelfInvocation + | <<< SelfInvocation {semi BlockStat} >>> +SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} + +WithTemplateBody ::= <<< [SelfType] TemplateStat {semi TemplateStat} >>> +TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>> +TemplateStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | {Annotation [nl]} {Modifier} Dcl + | Extension + | Expr1 + | EndMarker + | +SelfType ::= id [‘:’ InfixType] ‘=>’ + | ‘this’ ‘:’ InfixType ‘=>’ + +EnumBody ::= :<<< [SelfType] EnumStat {semi EnumStat} >>> +EnumStat ::= TemplateStat + | {Annotation [nl]} {Modifier} EnumCase +EnumCase ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps] | ids) + +TopStats ::= TopStat {semi TopStat} +TopStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | Extension + | Packaging + | PackageObject + | EndMarker + | +Packaging ::= ‘package’ QualId :<<< TopStats >>> +PackageObject ::= 
‘package’ ‘object’ ObjectDef + +CompilationUnit ::= {‘package’ QualId semi} TopStats +``` diff --git a/docs/_spec/_config.yml b/docs/_spec/_config.yml new file mode 100644 index 000000000000..bd1f691c65d0 --- /dev/null +++ b/docs/_spec/_config.yml @@ -0,0 +1,11 @@ +baseurl: /files/archive/spec/2.13 +latestScalaVersion: 2.13 +thisScalaVersion: 2.13 +versionCompareMessage: "an upcoming" +safe: true +lsi: false +highlighter: false +markdown: redcarpet +encoding: utf-8 +redcarpet: + extensions: ["no_intra_emphasis", "fenced_code_blocks", "autolink", "tables", "with_toc_data", "strikethrough", "lax_spacing", "space_after_headers", "superscript", "footnotes"] diff --git a/docs/_spec/_includes/numbering.css b/docs/_spec/_includes/numbering.css new file mode 100644 index 000000000000..2a22ce28b558 --- /dev/null +++ b/docs/_spec/_includes/numbering.css @@ -0,0 +1,60 @@ +h1 { + /* must reset here */ + counter-reset: chapter {{ page.chapter }}; +} +h1:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + content: "Chapter " counter(chapter); + display: block; +} + +h2 { + /* must increment here */ + counter-increment: section; + counter-reset: subsection; +} +h2:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: counter(chapter) "." counter(section) ; + display: inline; + margin-right: 1em; +} +h2:after { + /* can only have one counter-reset per tag, so can't do it in h2/h2:before... */ + counter-reset: example; +} + +h3 { + /* must increment here */ + counter-increment: subsection; +} +h3:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: counter(chapter) "." counter(section) "." 
counter(subsection); + display: inline; + margin-right: 1em; +} +h3[id*='example'] { + /* must increment here */ + counter-increment: example; + display: inline; +} +h3[id*='example']:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: "Example " counter(chapter) "." counter(section) "." counter(example); + display: inline; + margin-right: 1em; +} + +.no-numbering, .no-numbering:before, .no-numbering:after { + content: normal; + counter-reset: none; + counter-increment: none; +} diff --git a/docs/_spec/_includes/table-of-contents.yml b/docs/_spec/_includes/table-of-contents.yml new file mode 100644 index 000000000000..b70f97da5424 --- /dev/null +++ b/docs/_spec/_includes/table-of-contents.yml @@ -0,0 +1,23 @@ + +
+ +

Table of Contents

+ +
    + {% assign sorted_pages = site.pages | sort:"name" %} + {% for post in sorted_pages %} + + {% if post.chapter >= 0 %} +
  1. + {{ post.title }} +
  2. + {% endif %} + {% endfor %} +
+
+ + diff --git a/docs/_spec/_includes/version-notice.yml b/docs/_spec/_includes/version-notice.yml new file mode 100644 index 000000000000..5a7286631c11 --- /dev/null +++ b/docs/_spec/_includes/version-notice.yml @@ -0,0 +1,3 @@ +{% if site.thisScalaVersion != site.latestScalaVersion %} +
This is the specification of {{ site.versionCompareMessage }} version of Scala. See the Scala {{ site.latestScalaVersion }} spec.
+{% endif %} diff --git a/docs/_spec/_layouts/default.yml b/docs/_spec/_layouts/default.yml new file mode 100644 index 000000000000..2589a105dff2 --- /dev/null +++ b/docs/_spec/_layouts/default.yml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + {{ page.title }} | Scala {{ site.thisScalaVersion }} + + + +
+ +
+ + +
+{% include version-notice.yml %} +{{ content }} +
+ + + + + + + diff --git a/docs/_spec/_layouts/toc.yml b/docs/_spec/_layouts/toc.yml new file mode 100644 index 000000000000..1106222bd088 --- /dev/null +++ b/docs/_spec/_layouts/toc.yml @@ -0,0 +1,34 @@ + + + + + + + + + + {{ page.title }} | Scala {{ site.thisScalaVersion }} + + + + + + + + +
+
+ + Scala Language Specification + Edit at GitHub +
+
Version {{ site.thisScalaVersion }}
+
+
+{% include version-notice.yml %} +{{ content }} +
+ + + + diff --git a/docs/_spec/docker-compose.yml b/docs/_spec/docker-compose.yml new file mode 100644 index 000000000000..3eadc939ed40 --- /dev/null +++ b/docs/_spec/docker-compose.yml @@ -0,0 +1,11 @@ +version: '2' + +services: + jekyll: + user: "${UID}:${GID}" + build: . + command: sh -c "chown $UID / && bundle exec jekyll serve --incremental --host=0.0.0.0 " + ports: + - '4000:4000' + volumes: + - .:/srv/jekyll diff --git a/docs/_spec/index.md b/docs/_spec/index.md new file mode 100644 index 000000000000..df126db7bd44 --- /dev/null +++ b/docs/_spec/index.md @@ -0,0 +1,55 @@ +--- +title: Scala Language Specification +layout: toc +--- + +{% include table-of-contents.yml %} + +#### Authors and Contributors + +Martin Odersky, Philippe Altherr, Vincent Cremet, Gilles Dubochet, Burak Emir, Philipp Haller, Stéphane Micheloud, Nikolay Mihaylov, Adriaan Moors, Lukas Rytz, Michel Schinz, Erik Stenman, Matthias Zenger + +Markdown Conversion by Iain McGinniss. + +#### Preface + +Scala is a Java-like programming language which unifies +object-oriented and functional programming. It is a pure +object-oriented language in the sense that every value is an +object. Types and behavior of objects are described by +classes. Classes can be composed using mixin composition. Scala is +designed to work seamlessly with less pure but mainstream +object-oriented languages like Java. + +Scala is a functional language in the sense that every function is a +value. Nesting of function definitions and higher-order functions are +naturally supported. Scala also supports a general notion of pattern +matching which can model the algebraic types used in many functional +languages. + +Scala has been designed to interoperate seamlessly with Java. +Scala classes can call Java methods, create Java objects, inherit from Java +classes and implement Java interfaces. None of this requires interface +definitions or glue code. 
+ +Scala has been developed from 2001 in the programming methods +laboratory at EPFL. Version 1.0 was released in November 2003. This +document describes the second version of the language, which was +released in March 2006. It acts as a reference for the language +definition and some core library modules. It is not intended to teach +Scala or its concepts; for this there are [other documents](14-references.html). + +Scala has been a collective effort of many people. The design and the +implementation of version 1.0 was completed by Philippe Altherr, +Vincent Cremet, Gilles Dubochet, Burak Emir, Stéphane Micheloud, +Nikolay Mihaylov, Michel Schinz, Erik Stenman, Matthias Zenger, and +the author. Iulian Dragos, Gilles Dubochet, Philipp Haller, Sean +McDirmid, Lex Spoon, and Geoffrey Washburn joined in the effort to +develop the second version of the language and tools. Gilad Bracha, +Craig Chambers, Erik Ernst, Matthias Felleisen, Shriram Krishnamurti, +Gary Leavens, Sebastian Maneth, Erik Meijer, Klaus Ostermann, Didier +Rémy, Mads Torgersen, and Philip Wadler have shaped the design of +the language through lively and inspiring discussions and comments on +previous versions of this document. The contributors to the Scala +mailing list have also given very useful feedback that helped us +improve the language and its tools. 
diff --git a/docs/_spec/public/favicon.ico b/docs/_spec/public/favicon.ico new file mode 100644 index 000000000000..9eb6ef516488 Binary files /dev/null and b/docs/_spec/public/favicon.ico differ diff --git a/docs/_spec/public/fonts/Heuristica-Bold.woff b/docs/_spec/public/fonts/Heuristica-Bold.woff new file mode 100644 index 000000000000..904579683d54 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-Bold.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-BoldItalic.woff b/docs/_spec/public/fonts/Heuristica-BoldItalic.woff new file mode 100644 index 000000000000..a3c523445375 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-BoldItalic.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-Regular.woff b/docs/_spec/public/fonts/Heuristica-Regular.woff new file mode 100644 index 000000000000..f5c1f8b2dbc5 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-Regular.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-RegularItalic.woff b/docs/_spec/public/fonts/Heuristica-RegularItalic.woff new file mode 100644 index 000000000000..d2c8664593dc Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-RegularItalic.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-Bold.woff b/docs/_spec/public/fonts/LuxiMono-Bold.woff new file mode 100644 index 000000000000..8581bb5aa458 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-Bold.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff b/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff new file mode 100644 index 000000000000..607ccf5cd030 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-Regular.woff b/docs/_spec/public/fonts/LuxiMono-Regular.woff new file mode 100644 index 000000000000..a478ad9ef2dd Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-Regular.woff differ diff --git 
a/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff b/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff new file mode 100644 index 000000000000..26999f990fa9 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff differ diff --git a/docs/_spec/public/fonts/LuxiSans-Bold.woff b/docs/_spec/public/fonts/LuxiSans-Bold.woff new file mode 100644 index 000000000000..162621568b53 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiSans-Bold.woff differ diff --git a/docs/_spec/public/fonts/LuxiSans-Regular.woff b/docs/_spec/public/fonts/LuxiSans-Regular.woff new file mode 100644 index 000000000000..89d980218f7a Binary files /dev/null and b/docs/_spec/public/fonts/LuxiSans-Regular.woff differ diff --git a/docs/_spec/public/images/classhierarchy.pdf b/docs/_spec/public/images/classhierarchy.pdf new file mode 100644 index 000000000000..58e050174b65 Binary files /dev/null and b/docs/_spec/public/images/classhierarchy.pdf differ diff --git a/docs/_spec/public/images/classhierarchy.png b/docs/_spec/public/images/classhierarchy.png new file mode 100644 index 000000000000..3da25ecbf2d5 Binary files /dev/null and b/docs/_spec/public/images/classhierarchy.png differ diff --git a/docs/_spec/public/images/github-logo@2x.png b/docs/_spec/public/images/github-logo@2x.png new file mode 100644 index 000000000000..285b0fee2f32 Binary files /dev/null and b/docs/_spec/public/images/github-logo@2x.png differ diff --git a/docs/_spec/public/images/scala-spiral-white.png b/docs/_spec/public/images/scala-spiral-white.png new file mode 100644 index 000000000000..46aaf80824c1 Binary files /dev/null and b/docs/_spec/public/images/scala-spiral-white.png differ diff --git a/docs/_spec/public/octicons/LICENSE.txt b/docs/_spec/public/octicons/LICENSE.txt new file mode 100644 index 000000000000..259b43d14de3 --- /dev/null +++ b/docs/_spec/public/octicons/LICENSE.txt @@ -0,0 +1,9 @@ +(c) 2012-2014 GitHub + +When using the GitHub logos, be sure to follow the GitHub logo 
guidelines (https://github.com/logos) + +Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) +Applies to all font files + +Code License: MIT (http://choosealicense.com/licenses/mit/) +Applies to all other files diff --git a/docs/_spec/public/octicons/octicons.css b/docs/_spec/public/octicons/octicons.css new file mode 100644 index 000000000000..a5dcd153a856 --- /dev/null +++ b/docs/_spec/public/octicons/octicons.css @@ -0,0 +1,235 @@ +@font-face { + font-family: 'octicons'; + src: url('octicons.eot?#iefix') format('embedded-opentype'), + url('octicons.woff') format('woff'), + url('octicons.ttf') format('truetype'), + url('octicons.svg#octicons') format('svg'); + font-weight: normal; + font-style: normal; +} + +/* + +.octicon is optimized for 16px. +.mega-octicon is optimized for 32px but can be used larger. + +*/ +.octicon, .mega-octicon { + font: normal normal normal 16px/1 octicons; + display: inline-block; + text-decoration: none; + text-rendering: auto; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} +.mega-octicon { font-size: 32px; } + + +.octicon-alert:before { content: '\f02d'} /*  */ +.octicon-alignment-align:before { content: '\f08a'} /*  */ +.octicon-alignment-aligned-to:before { content: '\f08e'} /*  */ +.octicon-alignment-unalign:before { content: '\f08b'} /*  */ +.octicon-arrow-down:before { content: '\f03f'} /*  */ +.octicon-arrow-left:before { content: '\f040'} /*  */ +.octicon-arrow-right:before { content: '\f03e'} /*  */ +.octicon-arrow-small-down:before { content: '\f0a0'} /*  */ +.octicon-arrow-small-left:before { content: '\f0a1'} /*  */ +.octicon-arrow-small-right:before { content: '\f071'} /*  */ +.octicon-arrow-small-up:before { content: '\f09f'} /*  */ +.octicon-arrow-up:before { content: '\f03d'} /*  */ +.octicon-beer:before { content: '\f069'} /*  */ +.octicon-book:before { content: 
'\f007'} /*  */ +.octicon-bookmark:before { content: '\f07b'} /*  */ +.octicon-briefcase:before { content: '\f0d3'} /*  */ +.octicon-broadcast:before { content: '\f048'} /*  */ +.octicon-browser:before { content: '\f0c5'} /*  */ +.octicon-bug:before { content: '\f091'} /*  */ +.octicon-calendar:before { content: '\f068'} /*  */ +.octicon-check:before { content: '\f03a'} /*  */ +.octicon-checklist:before { content: '\f076'} /*  */ +.octicon-chevron-down:before { content: '\f0a3'} /*  */ +.octicon-chevron-left:before { content: '\f0a4'} /*  */ +.octicon-chevron-right:before { content: '\f078'} /*  */ +.octicon-chevron-up:before { content: '\f0a2'} /*  */ +.octicon-circle-slash:before { content: '\f084'} /*  */ +.octicon-circuit-board:before { content: '\f0d6'} /*  */ +.octicon-clippy:before { content: '\f035'} /*  */ +.octicon-clock:before { content: '\f046'} /*  */ +.octicon-cloud-download:before { content: '\f00b'} /*  */ +.octicon-cloud-upload:before { content: '\f00c'} /*  */ +.octicon-code:before { content: '\f05f'} /*  */ +.octicon-color-mode:before { content: '\f065'} /*  */ +.octicon-comment-add:before, +.octicon-comment:before { content: '\f02b'} /*  */ +.octicon-comment-discussion:before { content: '\f04f'} /*  */ +.octicon-credit-card:before { content: '\f045'} /*  */ +.octicon-dash:before { content: '\f0ca'} /*  */ +.octicon-dashboard:before { content: '\f07d'} /*  */ +.octicon-database:before { content: '\f096'} /*  */ +.octicon-device-camera:before { content: '\f056'} /*  */ +.octicon-device-camera-video:before { content: '\f057'} /*  */ +.octicon-device-desktop:before { content: '\f27c'} /*  */ +.octicon-device-mobile:before { content: '\f038'} /*  */ +.octicon-diff:before { content: '\f04d'} /*  */ +.octicon-diff-added:before { content: '\f06b'} /*  */ +.octicon-diff-ignored:before { content: '\f099'} /*  */ +.octicon-diff-modified:before { content: '\f06d'} /*  */ +.octicon-diff-removed:before { content: '\f06c'} /* 
 */ +.octicon-diff-renamed:before { content: '\f06e'} /*  */ +.octicon-ellipsis:before { content: '\f09a'} /*  */ +.octicon-eye-unwatch:before, +.octicon-eye-watch:before, +.octicon-eye:before { content: '\f04e'} /*  */ +.octicon-file-binary:before { content: '\f094'} /*  */ +.octicon-file-code:before { content: '\f010'} /*  */ +.octicon-file-directory:before { content: '\f016'} /*  */ +.octicon-file-media:before { content: '\f012'} /*  */ +.octicon-file-pdf:before { content: '\f014'} /*  */ +.octicon-file-submodule:before { content: '\f017'} /*  */ +.octicon-file-symlink-directory:before { content: '\f0b1'} /*  */ +.octicon-file-symlink-file:before { content: '\f0b0'} /*  */ +.octicon-file-text:before { content: '\f011'} /*  */ +.octicon-file-zip:before { content: '\f013'} /*  */ +.octicon-flame:before { content: '\f0d2'} /*  */ +.octicon-fold:before { content: '\f0cc'} /*  */ +.octicon-gear:before { content: '\f02f'} /*  */ +.octicon-gift:before { content: '\f042'} /*  */ +.octicon-gist:before { content: '\f00e'} /*  */ +.octicon-gist-secret:before { content: '\f08c'} /*  */ +.octicon-git-branch-create:before, +.octicon-git-branch-delete:before, +.octicon-git-branch:before { content: '\f020'} /*  */ +.octicon-git-commit:before { content: '\f01f'} /*  */ +.octicon-git-compare:before { content: '\f0ac'} /*  */ +.octicon-git-merge:before { content: '\f023'} /*  */ +.octicon-git-pull-request-abandoned:before, +.octicon-git-pull-request:before { content: '\f009'} /*  */ +.octicon-globe:before { content: '\f0b6'} /*  */ +.octicon-graph:before { content: '\f043'} /*  */ +.octicon-heart:before { content: '\2665'} /* ♥ */ +.octicon-history:before { content: '\f07e'} /*  */ +.octicon-home:before { content: '\f08d'} /*  */ +.octicon-horizontal-rule:before { content: '\f070'} /*  */ +.octicon-hourglass:before { content: '\f09e'} /*  */ +.octicon-hubot:before { content: '\f09d'} /*  */ +.octicon-inbox:before { content: '\f0cf'} /*  */ 
+.octicon-info:before { content: '\f059'} /*  */ +.octicon-issue-closed:before { content: '\f028'} /*  */ +.octicon-issue-opened:before { content: '\f026'} /*  */ +.octicon-issue-reopened:before { content: '\f027'} /*  */ +.octicon-jersey:before { content: '\f019'} /*  */ +.octicon-jump-down:before { content: '\f072'} /*  */ +.octicon-jump-left:before { content: '\f0a5'} /*  */ +.octicon-jump-right:before { content: '\f0a6'} /*  */ +.octicon-jump-up:before { content: '\f073'} /*  */ +.octicon-key:before { content: '\f049'} /*  */ +.octicon-keyboard:before { content: '\f00d'} /*  */ +.octicon-law:before { content: '\f0d8'} /* */ +.octicon-light-bulb:before { content: '\f000'} /*  */ +.octicon-link:before { content: '\f05c'} /*  */ +.octicon-link-external:before { content: '\f07f'} /*  */ +.octicon-list-ordered:before { content: '\f062'} /*  */ +.octicon-list-unordered:before { content: '\f061'} /*  */ +.octicon-location:before { content: '\f060'} /*  */ +.octicon-gist-private:before, +.octicon-mirror-private:before, +.octicon-git-fork-private:before, +.octicon-lock:before { content: '\f06a'} /*  */ +.octicon-logo-github:before { content: '\f092'} /*  */ +.octicon-mail:before { content: '\f03b'} /*  */ +.octicon-mail-read:before { content: '\f03c'} /*  */ +.octicon-mail-reply:before { content: '\f051'} /*  */ +.octicon-mark-github:before { content: '\f00a'} /*  */ +.octicon-markdown:before { content: '\f0c9'} /*  */ +.octicon-megaphone:before { content: '\f077'} /*  */ +.octicon-mention:before { content: '\f0be'} /*  */ +.octicon-microscope:before { content: '\f089'} /*  */ +.octicon-milestone:before { content: '\f075'} /*  */ +.octicon-mirror-public:before, +.octicon-mirror:before { content: '\f024'} /*  */ +.octicon-mortar-board:before { content: '\f0d7'} /* */ +.octicon-move-down:before { content: '\f0a8'} /*  */ +.octicon-move-left:before { content: '\f074'} /*  */ +.octicon-move-right:before { content: '\f0a9'} /*  */ 
+.octicon-move-up:before { content: '\f0a7'} /*  */ +.octicon-mute:before { content: '\f080'} /*  */ +.octicon-no-newline:before { content: '\f09c'} /*  */ +.octicon-octoface:before { content: '\f008'} /*  */ +.octicon-organization:before { content: '\f037'} /*  */ +.octicon-package:before { content: '\f0c4'} /*  */ +.octicon-paintcan:before { content: '\f0d1'} /*  */ +.octicon-pencil:before { content: '\f058'} /*  */ +.octicon-person-add:before, +.octicon-person-follow:before, +.octicon-person:before { content: '\f018'} /*  */ +.octicon-pin:before { content: '\f041'} /*  */ +.octicon-playback-fast-forward:before { content: '\f0bd'} /*  */ +.octicon-playback-pause:before { content: '\f0bb'} /*  */ +.octicon-playback-play:before { content: '\f0bf'} /*  */ +.octicon-playback-rewind:before { content: '\f0bc'} /*  */ +.octicon-plug:before { content: '\f0d4'} /*  */ +.octicon-repo-create:before, +.octicon-gist-new:before, +.octicon-file-directory-create:before, +.octicon-file-add:before, +.octicon-plus:before { content: '\f05d'} /*  */ +.octicon-podium:before { content: '\f0af'} /*  */ +.octicon-primitive-dot:before { content: '\f052'} /*  */ +.octicon-primitive-square:before { content: '\f053'} /*  */ +.octicon-pulse:before { content: '\f085'} /*  */ +.octicon-puzzle:before { content: '\f0c0'} /*  */ +.octicon-question:before { content: '\f02c'} /*  */ +.octicon-quote:before { content: '\f063'} /*  */ +.octicon-radio-tower:before { content: '\f030'} /*  */ +.octicon-repo-delete:before, +.octicon-repo:before { content: '\f001'} /*  */ +.octicon-repo-clone:before { content: '\f04c'} /*  */ +.octicon-repo-force-push:before { content: '\f04a'} /*  */ +.octicon-gist-fork:before, +.octicon-repo-forked:before { content: '\f002'} /*  */ +.octicon-repo-pull:before { content: '\f006'} /*  */ +.octicon-repo-push:before { content: '\f005'} /*  */ +.octicon-rocket:before { content: '\f033'} /*  */ +.octicon-rss:before { content: '\f034'} /*  */ 
+.octicon-ruby:before { content: '\f047'} /*  */ +.octicon-screen-full:before { content: '\f066'} /*  */ +.octicon-screen-normal:before { content: '\f067'} /*  */ +.octicon-search-save:before, +.octicon-search:before { content: '\f02e'} /*  */ +.octicon-server:before { content: '\f097'} /*  */ +.octicon-settings:before { content: '\f07c'} /*  */ +.octicon-log-in:before, +.octicon-sign-in:before { content: '\f036'} /*  */ +.octicon-log-out:before, +.octicon-sign-out:before { content: '\f032'} /*  */ +.octicon-split:before { content: '\f0c6'} /*  */ +.octicon-squirrel:before { content: '\f0b2'} /*  */ +.octicon-star-add:before, +.octicon-star-delete:before, +.octicon-star:before { content: '\f02a'} /*  */ +.octicon-steps:before { content: '\f0c7'} /*  */ +.octicon-stop:before { content: '\f08f'} /*  */ +.octicon-repo-sync:before, +.octicon-sync:before { content: '\f087'} /*  */ +.octicon-tag-remove:before, +.octicon-tag-add:before, +.octicon-tag:before { content: '\f015'} /*  */ +.octicon-telescope:before { content: '\f088'} /*  */ +.octicon-terminal:before { content: '\f0c8'} /*  */ +.octicon-three-bars:before { content: '\f05e'} /*  */ +.octicon-tools:before { content: '\f031'} /*  */ +.octicon-trashcan:before { content: '\f0d0'} /*  */ +.octicon-triangle-down:before { content: '\f05b'} /*  */ +.octicon-triangle-left:before { content: '\f044'} /*  */ +.octicon-triangle-right:before { content: '\f05a'} /*  */ +.octicon-triangle-up:before { content: '\f0aa'} /*  */ +.octicon-unfold:before { content: '\f039'} /*  */ +.octicon-unmute:before { content: '\f0ba'} /*  */ +.octicon-versions:before { content: '\f064'} /*  */ +.octicon-remove-close:before, +.octicon-x:before { content: '\f081'} /*  */ +.octicon-zap:before { content: '\26A1'} /* ⚡ */ diff --git a/docs/_spec/public/octicons/octicons.eot b/docs/_spec/public/octicons/octicons.eot new file mode 100644 index 000000000000..22881a8b6c43 Binary files /dev/null and 
b/docs/_spec/public/octicons/octicons.eot differ diff --git a/docs/_spec/public/octicons/octicons.svg b/docs/_spec/public/octicons/octicons.svg new file mode 100644 index 000000000000..ea3e0f161528 --- /dev/null +++ b/docs/_spec/public/octicons/octicons.svg @@ -0,0 +1,198 @@ + + + + +(c) 2012-2014 GitHub + +When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos) + +Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) +Applies to all font files + +Code License: MIT (http://choosealicense.com/licenses/mit/) +Applies to all other files + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/_spec/public/octicons/octicons.ttf b/docs/_spec/public/octicons/octicons.ttf new file mode 100644 index 000000000000..189ca2813d49 Binary files /dev/null and b/docs/_spec/public/octicons/octicons.ttf differ diff --git a/docs/_spec/public/octicons/octicons.woff b/docs/_spec/public/octicons/octicons.woff new file mode 100644 index 000000000000..2b770e429f38 Binary files /dev/null and b/docs/_spec/public/octicons/octicons.woff differ diff --git a/docs/_spec/public/scripts/LICENSE-highlight b/docs/_spec/public/scripts/LICENSE-highlight new file mode 100644 index 000000000000..fe2f67b1628e --- /dev/null +++ b/docs/_spec/public/scripts/LICENSE-highlight @@ -0,0 +1,24 @@ +Copyright (c) 2006, Ivan Sagalaev +All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of highlight.js nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/_spec/public/scripts/LICENSE-toc b/docs/_spec/public/scripts/LICENSE-toc new file mode 100644 index 000000000000..4e236e8696c3 --- /dev/null +++ b/docs/_spec/public/scripts/LICENSE-toc @@ -0,0 +1,18 @@ +(The MIT License) +Copyright (c) 2013 Greg Allen +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/docs/_spec/public/scripts/highlight.pack.js b/docs/_spec/public/scripts/highlight.pack.js new file mode 100644 index 000000000000..bfeca09abb51 --- /dev/null +++ b/docs/_spec/public/scripts/highlight.pack.js @@ -0,0 +1 @@ +var hljs=new function(){function j(v){return v.replace(/&/gm,"&").replace(//gm,">")}function t(v){return v.nodeName.toLowerCase()}function h(w,x){var v=w&&w.exec(x);return v&&v.index==0}function r(w){var v=(w.className+" "+(w.parentNode?w.parentNode.className:"")).split(/\s+/);v=v.map(function(x){return x.replace(/^lang(uage)?-/,"")});return v.filter(function(x){return i(x)||/no(-?)highlight/.test(x)})[0]}function o(x,y){var v={};for(var w in x){v[w]=x[w]}if(y){for(var w in y){v[w]=y[w]}}return v}function u(x){var v=[];(function w(y,z){for(var A=y.firstChild;A;A=A.nextSibling){if(A.nodeType==3){z+=A.nodeValue.length}else{if(A.nodeType==1){v.push({event:"start",offset:z,node:A});z=w(A,z);if(!t(A).match(/br|hr|img|input/)){v.push({event:"stop",offset:z,node:A})}}}}return z})(x,0);return v}function q(w,y,C){var x=0;var F="";var z=[];function B(){if(!w.length||!y.length){return w.length?w:y}if(w[0].offset!=y[0].offset){return(w[0].offset"}function E(G){F+=""}function v(G){(G.event=="start"?A:E)(G.node)}while(w.length||y.length){var 
D=B();F+=j(C.substr(x,D[0].offset-x));x=D[0].offset;if(D==w){z.reverse().forEach(E);do{v(D.splice(0,1)[0]);D=B()}while(D==w&&D.length&&D[0].offset==x);z.reverse().forEach(A)}else{if(D[0].event=="start"){z.push(D[0].node)}else{z.pop()}v(D.splice(0,1)[0])}}return F+j(C.substr(x))}function m(y){function v(z){return(z&&z.source)||z}function w(A,z){return RegExp(v(A),"m"+(y.cI?"i":"")+(z?"g":""))}function x(D,C){if(D.compiled){return}D.compiled=true;D.k=D.k||D.bK;if(D.k){var z={};var E=function(G,F){if(y.cI){F=F.toLowerCase()}F.split(" ").forEach(function(H){var I=H.split("|");z[I[0]]=[G,I[1]?Number(I[1]):1]})};if(typeof D.k=="string"){E("keyword",D.k)}else{Object.keys(D.k).forEach(function(F){E(F,D.k[F])})}D.k=z}D.lR=w(D.l||/\b[A-Za-z0-9_]+\b/,true);if(C){if(D.bK){D.b="\\b("+D.bK.split(" ").join("|")+")\\b"}if(!D.b){D.b=/\B|\b/}D.bR=w(D.b);if(!D.e&&!D.eW){D.e=/\B|\b/}if(D.e){D.eR=w(D.e)}D.tE=v(D.e)||"";if(D.eW&&C.tE){D.tE+=(D.e?"|":"")+C.tE}}if(D.i){D.iR=w(D.i)}if(D.r===undefined){D.r=1}if(!D.c){D.c=[]}var B=[];D.c.forEach(function(F){if(F.v){F.v.forEach(function(G){B.push(o(F,G))})}else{B.push(F=="self"?D:F)}});D.c=B;D.c.forEach(function(F){x(F,D)});if(D.starts){x(D.starts,C)}var A=D.c.map(function(F){return F.bK?"\\.?("+F.b+")\\.?":F.b}).concat([D.tE,D.i]).map(v).filter(Boolean);D.t=A.length?w(A.join("|"),true):{exec:function(F){return null}}}x(y)}function c(T,L,J,R){function v(V,W){for(var U=0;U";V+=aa+'">';return V+Y+Z}function N(){if(!I.k){return j(C)}var U="";var X=0;I.lR.lastIndex=0;var V=I.lR.exec(C);while(V){U+=j(C.substr(X,V.index-X));var W=E(I,V);if(W){H+=W[1];U+=w(W[0],j(V[0]))}else{U+=j(V[0])}X=I.lR.lastIndex;V=I.lR.exec(C)}return U+j(C.substr(X))}function F(){if(I.sL&&!f[I.sL]){return j(C)}var U=I.sL?c(I.sL,C,true,S):e(C);if(I.r>0){H+=U.r}if(I.subLanguageMode=="continuous"){S=U.top}return w(U.language,U.value,false,true)}function Q(){return I.sL!==undefined?F():N()}function P(W,V){var 
U=W.cN?w(W.cN,"",true):"";if(W.rB){D+=U;C=""}else{if(W.eB){D+=j(V)+U;C=""}else{D+=U;C=V}}I=Object.create(W,{parent:{value:I}})}function G(U,Y){C+=U;if(Y===undefined){D+=Q();return 0}var W=v(Y,I);if(W){D+=Q();P(W,Y);return W.rB?0:Y.length}var X=z(I,Y);if(X){var V=I;if(!(V.rE||V.eE)){C+=Y}D+=Q();do{if(I.cN){D+=""}H+=I.r;I=I.parent}while(I!=X.parent);if(V.eE){D+=j(Y)}C="";if(X.starts){P(X.starts,"")}return V.rE?0:Y.length}if(A(Y,I)){throw new Error('Illegal lexeme "'+Y+'" for mode "'+(I.cN||"")+'"')}C+=Y;return Y.length||1}var M=i(T);if(!M){throw new Error('Unknown language: "'+T+'"')}m(M);var I=R||M;var S;var D="";for(var K=I;K!=M;K=K.parent){if(K.cN){D=w(K.cN,"",true)+D}}var C="";var H=0;try{var B,y,x=0;while(true){I.t.lastIndex=x;B=I.t.exec(L);if(!B){break}y=G(L.substr(x,B.index-x),B[0]);x=B.index+y}G(L.substr(x));for(var K=I;K.parent;K=K.parent){if(K.cN){D+=""}}return{r:H,value:D,language:T,top:I}}catch(O){if(O.message.indexOf("Illegal")!=-1){return{r:0,value:j(L)}}else{throw O}}}function e(y,x){x=x||b.languages||Object.keys(f);var v={r:0,value:j(y)};var w=v;x.forEach(function(z){if(!i(z)){return}var A=c(z,y,false);A.language=z;if(A.r>w.r){w=A}if(A.r>v.r){w=v;v=A}});if(w.language){v.second_best=w}return v}function g(v){if(b.tabReplace){v=v.replace(/^((<[^>]+>|\t)+)/gm,function(w,z,y,x){return z.replace(/\t/g,b.tabReplace)})}if(b.useBR){v=v.replace(/\n/g,"
")}return v}function p(A){var B=r(A);if(/no(-?)highlight/.test(B)){return}var y;if(b.useBR){y=document.createElementNS("http://www.w3.org/1999/xhtml","div");y.innerHTML=A.innerHTML.replace(/\n/g,"").replace(//g,"\n")}else{y=A}var z=y.textContent;var v=B?c(B,z,true):e(z);var x=u(y);if(x.length){var w=document.createElementNS("http://www.w3.org/1999/xhtml","div");w.innerHTML=v.value;v.value=q(x,u(w),z)}v.value=g(v.value);A.innerHTML=v.value;A.className+=" hljs "+(!B&&v.language||"");A.result={language:v.language,re:v.r};if(v.second_best){A.second_best={language:v.second_best.language,re:v.second_best.r}}}var b={classPrefix:"hljs-",tabReplace:null,useBR:false,languages:undefined};function s(v){b=o(b,v)}function l(){if(l.called){return}l.called=true;var v=document.querySelectorAll("pre code");Array.prototype.forEach.call(v,p)}function a(){addEventListener("DOMContentLoaded",l,false);addEventListener("load",l,false)}var f={};var n={};function d(v,x){var w=f[v]=x(this);if(w.aliases){w.aliases.forEach(function(y){n[y]=v})}}function k(){return Object.keys(f)}function i(v){return 
f[v]||f[n[v]]}this.highlight=c;this.highlightAuto=e;this.fixMarkup=g;this.highlightBlock=p;this.configure=s;this.initHighlighting=l;this.initHighlightingOnLoad=a;this.registerLanguage=d;this.listLanguages=k;this.getLanguage=i;this.inherit=o;this.IR="[a-zA-Z][a-zA-Z0-9_]*";this.UIR="[a-zA-Z_][a-zA-Z0-9_]*";this.NR="\\b\\d+(\\.\\d+)?";this.CNR="(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)";this.BNR="\\b(0b[01]+)";this.RSR="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~";this.BE={b:"\\\\[\\s\\S]",r:0};this.ASM={cN:"string",b:"'",e:"'",i:"\\n",c:[this.BE]};this.QSM={cN:"string",b:'"',e:'"',i:"\\n",c:[this.BE]};this.PWM={b:/\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such)\b/};this.CLCM={cN:"comment",b:"//",e:"$",c:[this.PWM]};this.CBCM={cN:"comment",b:"/\\*",e:"\\*/",c:[this.PWM]};this.HCM={cN:"comment",b:"#",e:"$",c:[this.PWM]};this.NM={cN:"number",b:this.NR,r:0};this.CNM={cN:"number",b:this.CNR,r:0};this.BNM={cN:"number",b:this.BNR,r:0};this.CSSNM={cN:"number",b:this.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",r:0};this.RM={cN:"regexp",b:/\//,e:/\/[gim]*/,i:/\n/,c:[this.BE,{b:/\[/,e:/\]/,r:0,c:[this.BE]}]};this.TM={cN:"title",b:this.IR,r:0};this.UTM={cN:"title",b:this.UIR,r:0}}();hljs.registerLanguage("scala",function(d){var b={cN:"annotation",b:"@[A-Za-z]+"};var c={cN:"string",b:'u?r?"""',e:'"""',r:10};var a={cN:"symbol",b:"'\\w[\\w\\d_]*(?!')"};var e={cN:"type",b:"\\b[A-Z][A-Za-z0-9_]*",r:0};var h={cN:"title",b:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,r:0};var i={cN:"class",bK:"class object trait type",e:/[:={\[(\n;]/,c:[{cN:"keyword",bK:"extends with",r:10},h]};var g={cN:"function",bK:"def val",e:/[:={\[(\n;]/,c:[h]};var 
f={cN:"javadoc",b:"/\\*\\*",e:"\\*/",c:[{cN:"javadoctag",b:"@[A-Za-z]+"}],r:10};return{k:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},c:[d.CLCM,d.CBCM,c,d.QSM,a,e,g,i,d.CNM,b]}}); \ No newline at end of file diff --git a/docs/_spec/public/scripts/main.js b/docs/_spec/public/scripts/main.js new file mode 100644 index 000000000000..9ade9c770f1e --- /dev/null +++ b/docs/_spec/public/scripts/main.js @@ -0,0 +1,71 @@ +function currentChapter() { + var path = document.location.pathname; + var idx = path.lastIndexOf("/") + 1; + var chap = path.substring(idx, idx + 2); + return parseInt(chap, 10); +} + +function heading(i, heading, $heading) { + var currentLevel = parseInt(heading.tagName.substring(1)); + var result = ""; + if (currentLevel === this.headerLevel) { + this.headerCounts[this.headerLevel] += 1; + return "" + this.headerCounts[this.headerLevel] + " " + $heading.text(); + } else if (currentLevel < this.headerLevel) { + while(currentLevel < this.headerLevel) { + this.headerCounts[this.headerLevel] = 1; + this.headerLevel -= 1; + } + this.headerCounts[this.headerLevel] += 1; + return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); + } else { + while(currentLevel > this.headerLevel) { + this.headerLevel += 1; + this.headerCounts[this.headerLevel] = 1; + } + return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); + } +} + +// ignore when using wkhtmltopdf, or it won't work... 
+if(window.jekyllEnv !== 'spec-pdf') { + $('#toc').toc( + { + 'selectors': 'h1,h2,h3', + 'smoothScrolling': false, + 'chapter': currentChapter(), + 'headerLevel': 1, + 'headerCounts': [-1, currentChapter() - 1, 1, 1], + 'headerText': heading + } + ); +} + +// no language auto-detect so that EBNF isn't detected as scala +hljs.configure({ + languages: [] +}); + +// KaTeX configuration +document.addEventListener("DOMContentLoaded", function() { + renderMathInElement(document.body, { + delimiters: [ + {left: "´", right: "´", display: false}, // "display: false" -> inline + {left: "$$", right: "$$", display: true} + ], + ignoredTags: ['script', 'noscript', 'style', 'textarea'], + }); + // syntax highlighting after KaTeX is loaded, + // so that math can be used in code blocks + hljs.initHighlighting(); + $("pre nobr").addClass("fixws"); + // point when all necessary js is done, so PDF to be rendered + window.status = "loaded"; +}); + +$("#chapters a").each(function (index) { + if (document.location.pathname.endsWith($(this).attr("href"))) + $(this).addClass("chapter-active"); + else + $(this).removeClass("chapter-active"); +}); diff --git a/docs/_spec/public/scripts/toc.js b/docs/_spec/public/scripts/toc.js new file mode 100644 index 000000000000..5b0bded12cfc --- /dev/null +++ b/docs/_spec/public/scripts/toc.js @@ -0,0 +1,128 @@ +/*! 
+ * toc - jQuery Table of Contents Plugin + * v0.3.2 + * http://projects.jga.me/toc/ + * copyright Greg Allen 2014 + * MIT License +*/ +(function($) { +var verboseIdCache = {}; +$.fn.toc = function(options) { + var self = this; + var opts = $.extend({}, jQuery.fn.toc.defaults, options); + + var container = $(opts.container); + var headings = $(opts.selectors, container); + var headingOffsets = []; + var activeClassName = opts.activeClass; + + var scrollTo = function(e, callback) { + $('li', self).removeClass(activeClassName); + $(e.target).parent().addClass(activeClassName); + }; + + //highlight on scroll + var timeout; + var highlightOnScroll = function(e) { + if (timeout) { + clearTimeout(timeout); + } + timeout = setTimeout(function() { + var top = $(window).scrollTop(), + highlighted, closest = Number.MAX_VALUE, index = 0; + + for (var i = 0, c = headingOffsets.length; i < c; i++) { + var currentClosest = Math.abs(headingOffsets[i] - top); + if (currentClosest < closest) { + index = i; + closest = currentClosest; + } + } + + $('li', self).removeClass(activeClassName); + highlighted = $('li:eq('+ index +')', self).addClass(activeClassName); + opts.onHighlight(highlighted); + }, 50); + }; + if (opts.highlightOnScroll) { + $(window).on('scroll', highlightOnScroll); + highlightOnScroll(); + } + + return this.each(function() { + //build TOC + var el = $(this); + var ul = $(opts.listType); + + headings.each(function(i, heading) { + var $h = $(heading); + headingOffsets.push($h.offset().top - opts.highlightOffset); + + var anchorName = opts.anchorName(i, heading, opts.prefix); + + //add anchor + if(heading.id !== anchorName) { + var anchor = $('').attr('id', anchorName).insertBefore($h); + } + + //build TOC item + var a = $('') + .text(opts.headerText(i, heading, $h)) + .attr('href', '#' + anchorName) + .on('click', function(e) { + $(window).off('scroll', highlightOnScroll); + scrollTo(e, function() { + $(window).on('scroll', highlightOnScroll); + }); + 
el.trigger('selected', $(this).attr('href')); + }); + + var li = $('
  • ') + .addClass(opts.itemClass(i, heading, $h, opts.prefix)) + .append(a); + + ul.append(li); + }); + el.html(ul); + }); +}; + + +jQuery.fn.toc.defaults = { + container: 'body', + listType: '
      ', + selectors: 'h1,h2,h3', + prefix: 'toc', + activeClass: 'toc-active', + onHighlight: function() {}, + highlightOnScroll: true, + highlightOffset: 100, + anchorName: function(i, heading, prefix) { + if(heading.id.length) { + return heading.id; + } + + var candidateId = $(heading).text().replace(/[^a-z0-9]/ig, ' ').replace(/\s+/g, '-').toLowerCase(); + if (verboseIdCache[candidateId]) { + var j = 2; + + while(verboseIdCache[candidateId + j]) { + j++; + } + candidateId = candidateId + '-' + j; + + } + verboseIdCache[candidateId] = true; + + return prefix + '-' + candidateId; + }, + headerText: function(i, heading, $heading) { + return $heading.text(); + }, + itemClass: function(i, heading, $heading, prefix) { + return prefix + '-' + $heading[0].tagName.toLowerCase(); + } + +}; + +})(jQuery); diff --git a/docs/_spec/public/stylesheets/fonts.css b/docs/_spec/public/stylesheets/fonts.css new file mode 100644 index 000000000000..36efb2bbd5a0 --- /dev/null +++ b/docs/_spec/public/stylesheets/fonts.css @@ -0,0 +1,73 @@ +@font-face { + font-family: 'Luxi Sans'; + src: local('Luxi Sans Regular'), + url('../fonts/LuxiSans-Regular.woff') format('woff'); + font-weight: normal; + font-style: normal; +} + +@font-face { + font-family: 'Luxi Sans'; + src: local('Luxi Sans Bold'), + url('../fonts/LuxiSans-Bold.woff') format('woff'); + font-weight: bold; + font-style: normal; +} + +@font-face { + font-family: 'Luxi Mono'; + src: local('Luxi Mono Regular'), + url('../fonts/LuxiMono-Regular.woff') format('woff'); + font-weight: normal; + font-style: normal; +} +@font-face { + font-family: 'Luxi Mono'; + src: local('Luxi Mono Oblique'), + url('../fonts/LuxiMono-BoldOblique.woff') format('woff'); + font-weight: normal; + font-style: oblique; +} +@font-face { + font-family: 'Luxi Mono'; + src: local('Luxi Mono Bold'), + url('../fonts/LuxiMono-Bold.woff') format('woff'); + font-weight: bold; + font-style: normal; +} +@font-face { + font-family: 'Luxi Mono'; + src: local('Luxi Mono 
Bold Oblique'), + url('../fonts/LuxiMono-BoldOblique.woff') format('woff'); + font-weight: bold; + font-style: oblique; +} + +@font-face { + font-family: 'Heuristica'; + src: local('Heuristica Regular'), + url('../fonts/Heuristica-Regular.woff') format('woff'); + font-weight: normal; + font-style: normal; +} +@font-face { + font-family: 'Heuristica'; + src: local('Heuristica Italic'), + url('../fonts/Heuristica-RegularItalic.woff') format('woff'); + font-weight: normal; + font-style: italic; +} +@font-face { + font-family: 'Heuristica'; + src: local('Heuristica Bold'), + url('../fonts/Heuristica-Bold.woff') format('woff'); + font-weight: bold; + font-style: normal; +} +@font-face { + font-family: 'Heuristica'; + src: local('Heuristica Bold Italic'), + url('../fonts/Heuristica-BoldItalic.woff') format('woff'); + font-weight: bold; + font-style: italic; +} diff --git a/docs/_spec/public/stylesheets/print.css b/docs/_spec/public/stylesheets/print.css new file mode 100644 index 000000000000..f0efff28b203 --- /dev/null +++ b/docs/_spec/public/stylesheets/print.css @@ -0,0 +1,42 @@ +/* This removes a few things from screen.css for printing */ + +body { + padding: 0px; + margin: 0px; +} + +.anchor, #navigation, .to_top, .version-notice, .hidden-print { + display: none !important; +} + +.print-only { + display: block; +} + +#content-container { + width: 100%; + float: none; +} + +/* no scrollbars, jump to next row.. 
*/ +.highlight pre code { + overflow: hidden; + white-space: pre-wrap; +} + +main { + position: relative; + top: 32px; + margin: 0 0 0 0; + padding: 0px 32px; + max-width: none; + min-width: none; + min-height: none; + background-color: #FFF; +} + +/* Avoid clipped headings https://github.com/pdfkit/pdfkit/issues/113#issuecomment-7027798 */ +h2, h3, h4, h5, h6 { + padding: 0px; + margin: 0px; +} diff --git a/docs/_spec/public/stylesheets/screen-small.css b/docs/_spec/public/stylesheets/screen-small.css new file mode 100644 index 000000000000..674db7c49000 --- /dev/null +++ b/docs/_spec/public/stylesheets/screen-small.css @@ -0,0 +1,57 @@ +body { + padding: 0px; + margin: 0px; +} +aside.left { + position: relative; + margin: 0px auto; + overflow: visible; + height: inherit; + margin-bottom: 40px; + background-color: #073642; +} +header { + position: relative; + height: inherit; + min-height: 32px; +} +main { + max-width: 1000px; + min-width: 600px; + margin: 0 auto; +} + +#chapters a { + font-size: 14px; + max-height: 32px; + padding: 4px 8px; + white-space: nowrap; + display: inline-block; +} +#chapters > #github { + padding: 14px; +} + +#toc { + overflow: visible; +} +#toc .toc-active { + background: inherit; +} +#toc .toc-h1 { + display: inherit; +} +#toc .toc-h1 a { + padding-left: 10px; + color: #FFFFFF; + background: #72D0EB; +} +#toc .toc-h2 a { + padding-left: 30px; +} +#toc .toc-h3 a { + padding-left: 50px; +} +#toc a { + font-size: 14px; +} diff --git a/docs/_spec/public/stylesheets/screen-toc.css b/docs/_spec/public/stylesheets/screen-toc.css new file mode 100644 index 000000000000..7a04bd00f96c --- /dev/null +++ b/docs/_spec/public/stylesheets/screen-toc.css @@ -0,0 +1,37 @@ +body { + padding: 0px; + margin: 0px; +} +header { + height: 96px; + padding: 0px; + width: 100%; + position: relative; + color: #FFFFFF; +} +#header-main { + height: 68px; + line-height: 1.2; + font-size: 32px; +} +#header-sub { + padding-left: 64px; + height: 28px; + 
background-color:#72D0EB; + vertical-align: middle; +} +#scala-logo { + padding: 10px; +} +#title { + vertical-align: middle; +} +#github { + height: 40px; + padding: 14px; + float: right; + font-size: 0px; +} +li { + margin: 5px; +} diff --git a/docs/_spec/public/stylesheets/screen.css b/docs/_spec/public/stylesheets/screen.css new file mode 100644 index 000000000000..2073613eaea7 --- /dev/null +++ b/docs/_spec/public/stylesheets/screen.css @@ -0,0 +1,521 @@ +/* from https://gist.github.com/andyferra/2554919 */ + +body { + font-family:Heuristica,Georgia,serif; + color: #222222; + line-height: 1.6; + + padding-bottom: 10px; + background-color: white; + padding-left: 30px; +} + +#content-container > *:first-child { + margin-top: 0 !important; +} +#content-container > *:last-child { + margin-bottom: 0 !important; +} + +a { + color: #08C; + text-decoration: none; +} +a:hover, a:focus { + +} +a.absent { + color: #cc0000; +} +a.anchor { + display: block; + margin-left: -35px; + padding-left: 10px; + cursor: pointer; + position: absolute; + top: 0; + left: 0; + bottom: 0; + color: black; + width: 35px; height: 100%; +} + +a.anchor span { + vertical-align: middle; +} + +h1, h2, h3, h4, h5, h6 { + margin: 30px 0 0px; + padding: 0; + /* Fix anchor position due to header */ + padding-top: 32px; + margin-top: -32px; + font-weight: bold; + -webkit-font-smoothing: antialiased; + cursor: text; + position: relative; + pointer-events: none; +} + +h1, h2 { + font-weight: normal; +} + +h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, h5:hover a.anchor, h6:hover a.anchor { + text-decoration: none; +} + +h1:hover a.anchor span, h2:hover a.anchor span, h3:hover a.anchor span, h4:hover a.anchor span, h5:hover a.anchor span, h6:hover a.anchor span { + display: inline-block; +} + +h1 a.anchor span, h2 a.anchor span, h3 a.anchor span, h4 a.anchor span, h5 a.anchor span, h6 a.anchor span { + display: none; +} + +h1 a.anchor:hover span, h2 a.anchor:hover span, h3 
a.anchor:hover span, h4 a.anchor:hover span, h5 a.anchor:hover span, h6 a.anchor:hover span { + display: inline-block; +} + +h1 tt, h1 code { + font-size: inherit; +} + +h2 tt, h2 code { + font-size: inherit; +} + +h3 tt, h3 code { + font-size: inherit; +} + +h4 tt, h4 code { + font-size: inherit; +} + +h5 tt, h5 code { + font-size: inherit; +} + +h6 tt, h6 code { + font-size: inherit; +} + +h1 { + font-size: 28px; + color: black; +} + +h2 { + font-size: 24px; + color: black; +} + +h3 { + font-size: 18px; +} + +h4 { + font-size: 16px; +} + +h5 { + font-size: 14px; +} + +h6 { + color: #777777; + font-size: 14px; +} + +p, blockquote, ul, ol, dl, li, table, pre { + margin: 5px 0 15px; + -moz-font-feature-settings: "onum"; + -ms-font-feature-settings: "onum"; + -webkit-font-feature-settings: "onum"; + font-feature-settings: "onum"; +} + +hr { + background: transparent repeat-x 0 0; + border: 0 none; + color: #cccccc; + height: 4px; + padding: 0; +} + +body > h2:first-child { + margin-top: 0; + padding-top: 0; +} +body > h1:first-child { + margin-top: 0; + padding-top: 0; +} +body > h1:first-child + h2 { + margin-top: 0; + padding-top: 0; +} +body > h3:first-child, body > h4:first-child, body > h5:first-child, body > h6:first-child { + margin-top: 0; + padding-top: 0; +} + +a:first-child h1, a:first-child h2, a:first-child h3, a:first-child h4, a:first-child h5, a:first-child h6 { + margin-top: 0; + padding-top: 0; +} + +h1 p, h2 p, h3 p, h4 p, h5 p, h6 p { + margin-top: 0; +} + +li p.first { + display: inline-block; +} + +ul, ol { + padding-left: 30px; +} + +ul :first-child, ol :first-child { + margin-top: 0; +} + +ul :last-child, ol :last-child { + margin-bottom: 0; +} + +dl { + padding: 0; +} +dl dt { + font-size: 14px; + font-weight: bold; + font-style: italic; + padding: 0; + margin: 15px 0 5px; +} +dl dt:first-child { + padding: 0; +} +dl dt > :first-child { + margin-top: 0; +} +dl dt > :last-child { + margin-bottom: 0; +} +dl dd { + margin: 0 0 15px; + padding: 0 
15px; +} +dl dd > :first-child { + margin-top: 0; +} +dl dd > :last-child { + margin-bottom: 0; +} + +blockquote { + border-left: 4px solid #dddddd; + padding: 0 15px; + color: #222222; +} +blockquote > :first-child { + margin-top: 0; +} +blockquote > :last-child { + margin-bottom: 0; +} +blockquote:before { + content: "Example"; + color: #777777; + font-size: 14px; + font-weight: bold; +} + +table { + padding: 0; + margin: 0; + border: none; + border-collapse: collapse; +} +table tr { + background-color: white; +} +table tr:nth-child(2n) { + background-color: #f8f8f8; +} +table tr th { + background-color: #EAEAEA; + font-weight: bold; + text-align: left; + padding: 5px 13px; +} +table tr td { + text-align: left; + padding: 5px 13px; +} +table tr th :first-child, table tr td :first-child { + margin-top: 0; +} +table tr th :last-child, table tr td :last-child { + margin-bottom: 0; +} + +img { + max-width: 100%; +} + +span.frame { + display: block; + overflow: hidden; +} +span.frame > span { + border: 1px solid #dddddd; + display: block; + float: left; + overflow: hidden; + margin: 13px 0 0; + padding: 7px; + width: auto; +} +span.frame span img { + display: block; + float: left; +} +span.frame span span { + clear: both; + color: #333333; + display: block; + padding: 5px 0 0; +} +span.align-center { + display: block; + overflow: hidden; + clear: both; +} +span.align-center > span { + display: block; + overflow: hidden; + margin: 13px auto 0; + text-align: center; +} +span.align-center span img { + margin: 0 auto; + text-align: center; +} +span.align-right { + display: block; + overflow: hidden; + clear: both; +} +span.align-right > span { + display: block; + overflow: hidden; + margin: 13px 0 0; + text-align: right; +} +span.align-right span img { + margin: 0; + text-align: right; +} +span.float-left { + display: block; + margin-right: 13px; + overflow: hidden; + float: left; +} +span.float-left span { + margin: 13px 0 0; +} +span.float-right { + display: block; + 
margin-left: 13px; + overflow: hidden; + float: right; +} +span.float-right > span { + display: block; + overflow: hidden; + margin: 13px auto 0; + text-align: right; +} + +pre, code, tt { + font:14px "Luxi Mono", 'andale mono', 'lucida console', monospace; + line-height:1.5; +} + +.highlight pre { + background-color: #F8F8F8; + border-radius: 3px; + overflow: auto; + padding: 6px 10px; + white-space: nowrap; +} + +code { + background-color: transparent; + border: none; + margin: 0; + padding: 0; + white-space: pre; +} + +aside.left { + height: 100%; + position: fixed; + direction: rtl; + overflow: auto; + left: 0px; + width: 320px; + bottom: -32px; + font-family: "Luxi Sans", serif; + background-color: #073642; +} + +aside.left > nav { + direction: ltr; + top: 32px; + padding-bottom: 32px; +} + +article, aside, details, figcaption, figure, footer, header, hgroup, main, nav, section, summary { + display: block; +} + +audio, canvas, img, svg, video { + vertical-align: middle; +} + +audio, canvas, progress, video { + display: inline-block; + vertical-align: baseline; +} + +main { + position: relative; + top: 32px; + margin: 0 0 0 320px; + padding: 0px 32px; + max-width: 800px; + min-width: 800px; + min-height: 580px; + background-color: #FFF; +} + +header { + position: fixed; + top: 0px; + left: 0px; + height: 32px; + width: 100%; + background-color: #002B36; + margin: 0px 0px; + padding: 0px 0px; + font-family: "Luxi Sans", serif; + font-weight: bold; + z-index: 10; + overflow: hidden; + text-shadow: 1px 1px 0px rgba(0, 43, 54, 0.15); +} + +#chapters a { + color: #FFFFFF; + text-decoration: none; + font-size: 0.63vw; + padding: 100% 5px; +} + +#chapters a:hover, #chapters a:focus, #github:hover, #github:focus { + background: #DC322F; + -webkit-transition: background .2s ease-in; + -moz-transition: background .2s ease-in; + -ms-transition: background .2s ease-in; + -o-transition: background .2s ease-in; + transition: background .2s ease-in; +} + +#chapters 
a.chapter-active { + background: #72D0EB; +} + + +#toc ul { + margin: 0; + padding: 0; + list-style: none; +} + +#toc li { + margin: 0; + padding: 0; +} + +#toc a { + color: #FFFFFF; /*#073642;*/ + font-weight: bold; + font-size: 12px; + display: block; + text-shadow: 1px 1px 0px rgba(0, 43, 54, 0.15); +} + +#toc a:hover, #toc a:focus { + background: #DC322F; + text-decoration: none; + -webkit-transition: background .2s ease-in; + -moz-transition: background .2s ease-in; + -ms-transition: background .2s ease-in; + -o-transition: background .2s ease-in; + transition: background .2s ease-in; +} + +#toc .toc-h1 { + display: none; +} + +#toc .toc-h2 a { + padding-left: 10px; +} + +#toc .toc-h3 a { + padding-left: 30px; +} + +#toc .toc-active { + background: #72D0EB; +} + +#toc .toc-active a { + color: #FFFFFF; +} + +#chapters > #github { + padding: 0px; + float: right; +} + +.hljs{ + background: #f8f8f8; +} +/* proper rendering of MathJax into highlighted code blocks */ +.fixws { white-space: pre; } +.fixws .math { white-space: nowrap; } + +.version-notice { + background-color: #C93A3A; + color: #f2f2f2; + border:1px solid #ccc; + padding: 1em; + margin-bottom: 1em; +} +.version-notice a { + color: #f2f2f2; + font-weight: bold; + text-decoration: underline; +} + +.print-only { + display: none; +} diff --git a/docs/_spec/spec-toc.xslt b/docs/_spec/spec-toc.xslt new file mode 100644 index 000000000000..437b15e3e6f4 --- /dev/null +++ b/docs/_spec/spec-toc.xslt @@ -0,0 +1,64 @@ + + + + + + + Table of Contents + + + ./public/stylesheets/fonts.css + + + + +

      Table of Contents

      +
      + + +
      + +
    • + + + +
        + added to prevent self-closing tags in QtXmlPatterns + +
      +
    • + + diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 65fd07031290..1e791472bceb 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -55,10 +55,10 @@ subsection: - page: reference/metaprogramming/macros.md - page: reference/metaprogramming/macros-spec.md hidden: true + - page: reference/metaprogramming/simple-smp.md # description of a simplified metaprogramming language, this might not be the best place for it - page: reference/metaprogramming/staging.md - page: reference/metaprogramming/reflection.md - page: reference/metaprogramming/tasty-inspect.md - - page: reference/metaprogramming/simple-smp.md - title: Other New Features index: reference/other-new-features/other-new-features.md subsection: @@ -150,7 +150,9 @@ subsection: - page: reference/experimental/numeric-literals.md - page: reference/experimental/explicit-nulls.md - page: reference/experimental/main-annotation.md + - page: reference/experimental/into-modifier.md - page: reference/experimental/cc.md + - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md - page: reference/syntax.md - title: Language Versions @@ -171,11 +173,13 @@ subsection: - page: contributing/debugging.md - title: IDEs and Tools directory: tools + index: contributing/tools/index.md subsection: - page: contributing/tools/ide.md - page: contributing/tools/mill.md - page: contributing/tools/scalafix.md - title: Procedures + index: contributing/procedures/index.md subsection: - page: contributing/procedures/release.md - page: contributing/procedures/vulpix.md diff --git a/interfaces/src/dotty/tools/dotc/interfaces/Diagnostic.java b/interfaces/src/dotty/tools/dotc/interfaces/Diagnostic.java index c46360afaa3d..19878a2fa105 100644 --- a/interfaces/src/dotty/tools/dotc/interfaces/Diagnostic.java +++ b/interfaces/src/dotty/tools/dotc/interfaces/Diagnostic.java @@ -1,6 +1,7 @@ package dotty.tools.dotc.interfaces; import java.util.Optional; +import java.util.List; /** A diagnostic is a 
message emitted during the compilation process. * @@ -23,4 +24,7 @@ public interface Diagnostic { /** @return The position in a source file of the code that caused this diagnostic * to be emitted. */ Optional position(); + + /** @return A list of additional messages together with their code positions */ + List diagnosticRelatedInformation(); } diff --git a/interfaces/src/dotty/tools/dotc/interfaces/DiagnosticRelatedInformation.java b/interfaces/src/dotty/tools/dotc/interfaces/DiagnosticRelatedInformation.java new file mode 100644 index 000000000000..3ebea03f4362 --- /dev/null +++ b/interfaces/src/dotty/tools/dotc/interfaces/DiagnosticRelatedInformation.java @@ -0,0 +1,6 @@ +package dotty.tools.dotc.interfaces; + +public interface DiagnosticRelatedInformation { + SourcePosition position(); + String message(); +} diff --git a/language-server/test/dotty/tools/languageserver/HoverTest.scala b/language-server/test/dotty/tools/languageserver/HoverTest.scala index bbfec815e79a..a2196f4a71f3 100644 --- a/language-server/test/dotty/tools/languageserver/HoverTest.scala +++ b/language-server/test/dotty/tools/languageserver/HoverTest.scala @@ -244,4 +244,10 @@ class HoverTest { .hover(m1 to m2, hoverContent("Double")) .hover(m3 to m4, hoverContent("Double")) } + + @Test def annotation: Unit = { + code"""|@${m1}deprecated${m2} def ${m3}x${m4} = 42.0""" + .hover(m1 to m2, hoverContent("deprecated")) + .hover(m3 to m4, hoverContent("Double")) + } } diff --git a/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala b/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala new file mode 100644 index 000000000000..87208573eff9 --- /dev/null +++ b/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala @@ -0,0 +1,8 @@ +package scala.scalajs.runtime + +import scala.scalajs.js + +@inline +final class AnonFunctionXXL(f: js.Function1[IArray[Object], Object]) extends scala.runtime.FunctionXXL { + override def apply(xs: IArray[Object]): Object = f(xs) +} diff --git 
a/library/src/scala/CanEqual.scala b/library/src/scala/CanEqual.scala index dfb4ec7d2bfc..8c331bb21b43 100644 --- a/library/src/scala/CanEqual.scala +++ b/library/src/scala/CanEqual.scala @@ -1,7 +1,7 @@ package scala import annotation.implicitNotFound -import scala.collection.{Seq, Set} +import scala.collection.{Seq, Set, Map} /** A marker trait indicating that values of type `L` can be compared to values of type `R`. */ @implicitNotFound("Values of types ${L} and ${R} cannot be compared with == or !=") @@ -26,7 +26,7 @@ object CanEqual { given canEqualNumber: CanEqual[Number, Number] = derived given canEqualString: CanEqual[String, String] = derived - // The next 6 definitions can go into the companion objects of their corresponding + // The following definitions can go into the companion objects of their corresponding // classes. For now they are here in order not to have to touch the // source code of these classes given canEqualSeqs[T, U](using eq: CanEqual[T, U]): CanEqual[Seq[T], Seq[U]] = derived @@ -34,6 +34,10 @@ object CanEqual { given canEqualSet[T, U](using eq: CanEqual[T, U]): CanEqual[Set[T], Set[U]] = derived + given canEqualMap[K1, V1, K2, V2]( + using eqK: CanEqual[K1, K2], eqV: CanEqual[V1, V2] + ): CanEqual[Map[K1, V1], Map[K2, V2]] = derived + given canEqualOptions[T, U](using eq: CanEqual[T, U]): CanEqual[Option[T], Option[U]] = derived given canEqualOption[T](using eq: CanEqual[T, T]): CanEqual[Option[T], Option[T]] = derived // for `case None` in pattern matching diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 703f8a1e2992..fa72e320b560 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -83,7 +83,7 @@ sealed trait Tuple extends Product { object Tuple { /** Type of a tuple with an element appended */ - type Append[X <: Tuple, Y] <: Tuple = X match { + type Append[X <: Tuple, Y] <: NonEmptyTuple = X match { case EmptyTuple => Y *: EmptyTuple case x *: xs => x *: Append[xs, Y] } 
diff --git a/library/src/scala/annotation/MacroAnnotation.scala b/library/src/scala/annotation/MacroAnnotation.scala new file mode 100644 index 000000000000..5c39ef45f417 --- /dev/null +++ b/library/src/scala/annotation/MacroAnnotation.scala @@ -0,0 +1,212 @@ +// TODO in which package should this class be located? +package scala +package annotation + +import scala.quoted._ + +/** Base trait for macro annotation implementation. + * Macro annotations can transform definitions and add new definitions. + * + * See: `MacroAnnotation.transform` + * + * @syntax markdown + */ +@experimental +trait MacroAnnotation extends StaticAnnotation: + + /** Transform the `tree` definition and add new definitions + * + * This method takes as argument the annotated definition. + * It returns a non-empty list containing the modified version of the annotated definition. + * The new tree for the definition must use the original symbol. + * New definitions can be added to the list before or after the transformed definitions, this order + * will be retained. New definitions will not be visible from outside the macro expansion. + * + * #### Restrictions + * - All definitions in the result must have the same owner. The owner can be recovered from `Symbol.spliceOwner`. + * - Special case: an annotated top-level `def`, `val`, `var`, `lazy val` can return a `class`/`object` +definition that is owned by the package or package object. + * - Can not return a `type`. + * - Annotated top-level `class`/`object` can not return top-level `def`, `val`, `var`, `lazy val`. + * - Can not see new definition in user written code. + * + * #### Good practices + * - Make your new definitions private if you can. + * - New definitions added as class members should use a fresh name (`Symbol.freshName`) to avoid collisions. 
+ * - New top-level definitions should use a fresh name (`Symbol.freshName`) that includes the name of the annotated + * member as a prefix to avoid collisions of definitions added in other files. + * + * **IMPORTANT**: When developing and testing a macro annotation, you must enable `-Xcheck-macros` and `-Ycheck:all`. + * + * #### Example 1 + * This example shows how to modify a `def` and add a `val` next to it using a macro annotation. + * ```scala + * import scala.quoted.* + * import scala.collection.mutable + * + * class memoize extends MacroAnnotation: + * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + * import quotes.reflect._ + * tree match + * case DefDef(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(rhsTree)) => + * (param.tpt.tpe.asType, tpt.tpe.asType) match + * case ('[t], '[u]) => + * val cacheName = Symbol.freshName(name + "Cache") + * val cacheSymbol = Symbol.newVal(Symbol.spliceOwner, cacheName, TypeRepr.of[mutable.Map[t, u]], Flags.Private, Symbol.noSymbol) + * val cacheRhs = + * given Quotes = cacheSymbol.asQuotes + * '{ mutable.Map.empty[t, u] }.asTerm + * val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) + * val newRhs = + * given Quotes = tree.symbol.asQuotes + * val cacheRefExpr = Ref(cacheSymbol).asExprOf[mutable.Map[t, u]] + * val paramRefExpr = Ref(param.symbol).asExprOf[t] + * val rhsExpr = rhsTree.asExprOf[u] + * '{ $cacheRefExpr.getOrElseUpdate($paramRefExpr, $rhsExpr) }.asTerm + * val newTree = DefDef.copy(tree)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) + * List(cacheVal, newTree) + * case _ => + * report.error("Annotation only supported on `def` with a single argument are supported") + * List(tree) + * ``` + * with this macro annotation a user can write + * ```scala + * //{ + * class memoize extends scala.annotation.StaticAnnotation + * //} + * @memoize + * def fib(n: Int): Int = + * println(s"compute fib of $n") + * if n <= 1 then n else fib(n - 1) + fib(n 
- 2) + * ``` + * and the macro will modify the definition to create + * ```scala + * val fibCache$macro$1 = + * scala.collection.mutable.Map.empty[Int, Int] + * def fib(n: Int): Int = + * fibCache$macro$1.getOrElseUpdate( + * n, + * { + * println(s"compute fib of $n") + * if n <= 1 then n else fib(n - 1) + fib(n - 2) + * } + * ) + * ``` + * + * #### Example 2 + * This example shows how to modify a `class` using a macro annotation. + * It shows how to override inherited members and add new ones. + * ```scala + * import scala.annotation.{experimental, MacroAnnotation} + * import scala.quoted.* + * + * @experimental + * class equals extends MacroAnnotation: + * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + * import quotes.reflect.* + * tree match + * case ClassDef(className, ctr, parents, self, body) => + * val cls = tree.symbol + * + * val constructorParameters = ctr.paramss.collect { case clause: TermParamClause => clause } + * if constructorParameters.size != 1 || constructorParameters.head.params.isEmpty then + * report.errorAndAbort("@equals class must have a single argument list with at least one argument", ctr.pos) + * def checkNotOverridden(sym: Symbol): Unit = + * if sym.overridingSymbol(cls).exists then + * report.error(s"Cannot override ${sym.name} in a @equals class") + * + * val fields = body.collect { + * case vdef: ValDef if vdef.symbol.flags.is(Flags.ParamAccessor) => + * Select(This(cls), vdef.symbol).asExpr + * } + * + * val equalsSym = Symbol.requiredMethod("java.lang.Object.equals") + * checkNotOverridden(equalsSym) + * val equalsOverrideSym = Symbol.newMethod(cls, "equals", equalsSym.info, Flags.Override, Symbol.noSymbol) + * def equalsOverrideDefBody(argss: List[List[Tree]]): Option[Term] = + * given Quotes = equalsOverrideSym.asQuotes + * cls.typeRef.asType match + * case '[c] => + * Some(equalsExpr[c](argss.head.head.asExpr, fields).asTerm) + * val equalsOverrideDef = DefDef(equalsOverrideSym, 
equalsOverrideDefBody) + * + * val hashSym = Symbol.newVal(cls, Symbol.freshName("hash"), TypeRepr.of[Int], Flags.Private | Flags.Lazy, Symbol.noSymbol) + * val hashVal = ValDef(hashSym, Some(hashCodeExpr(className, fields)(using hashSym.asQuotes).asTerm)) + * + * val hashCodeSym = Symbol.requiredMethod("java.lang.Object.hashCode") + * checkNotOverridden(hashCodeSym) + * val hashCodeOverrideSym = Symbol.newMethod(cls, "hashCode", hashCodeSym.info, Flags.Override, Symbol.noSymbol) + * val hashCodeOverrideDef = DefDef(hashCodeOverrideSym, _ => Some(Ref(hashSym))) + * + * val newBody = equalsOverrideDef :: hashVal :: hashCodeOverrideDef :: body + * List(ClassDef.copy(tree)(className, ctr, parents, self, newBody)) + * case _ => + * report.error("Annotation only supports `class`") + * List(tree) + * + * private def equalsExpr[T: Type](that: Expr[Any], thisFields: List[Expr[Any]])(using Quotes): Expr[Boolean] = + * '{ + * $that match + * case that: T @unchecked => + * ${ + * val thatFields: List[Expr[Any]] = + * import quotes.reflect.* + * thisFields.map(field => Select('{that}.asTerm, field.asTerm.symbol).asExpr) + * thisFields.zip(thatFields) + * .map { case (thisField, thatField) => '{ $thisField == $thatField } } + * .reduce { case (pred1, pred2) => '{ $pred1 && $pred2 } } + * } + * case _ => false + * } + * + * private def hashCodeExpr(className: String, thisFields: List[Expr[Any]])(using Quotes): Expr[Int] = + * '{ + * var acc: Int = ${ Expr(scala.runtime.Statics.mix(-889275714, className.hashCode)) } + * ${ + * Expr.block( + * thisFields.map { + * case '{ $field: Boolean } => '{ if $field then 1231 else 1237 } + * case '{ $field: Byte } => '{ $field.toInt } + * case '{ $field: Char } => '{ $field.toInt } + * case '{ $field: Short } => '{ $field.toInt } + * case '{ $field: Int } => field + * case '{ $field: Long } => '{ scala.runtime.Statics.longHash($field) } + * case '{ $field: Double } => '{ scala.runtime.Statics.doubleHash($field) } + * case '{ $field: Float } 
=> '{ scala.runtime.Statics.floatHash($field) } + * case '{ $field: Null } => '{ 0 } + * case '{ $field: Unit } => '{ 0 } + * case field => '{ scala.runtime.Statics.anyHash($field) } + * }.map(hash => '{ acc = scala.runtime.Statics.mix(acc, $hash) }), + * '{ scala.runtime.Statics.finalizeHash(acc, ${Expr(thisFields.size)}) } + * ) + * } + * } + * ``` + * with this macro annotation a user can write + * ```scala + * //{ + * class equals extends scala.annotation.StaticAnnotation + * //} + * @equals class User(val name: String, val id: Int) + * ``` + * and the macro will modify the class definition to generate the following code + * ```scala + * class User(val name: String, val id: Int): + * override def equals(that: Any): Boolean = + * that match + * case that: User => this.name == that.name && this.id == that.id + * case _ => false + * private lazy val hash$macro$1: Int = + * var acc = 515782504 // scala.runtime.Statics.mix(-889275714, "User".hashCode) + * acc = scala.runtime.Statics.mix(acc, scala.runtime.Statics.anyHash(name)) + * acc = scala.runtime.Statics.mix(acc, id) + * scala.runtime.Statics.finalizeHash(acc, 2) + * override def hashCode(): Int = hash$macro$1 + * ``` + * + * @param Quotes Implicit instance of Quotes used for tree reflection + * @param tree Tree that will be transformed + * + * @syntax markdown + */ + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] diff --git a/library/src/scala/annotation/allowConversions.scala b/library/src/scala/annotation/allowConversions.scala new file mode 100644 index 000000000000..9d752ee26d21 --- /dev/null +++ b/library/src/scala/annotation/allowConversions.scala @@ -0,0 +1,10 @@ +package scala.annotation +import annotation.experimental + +/** An annotation on a parameter type that allows implicit conversions + * for its arguments. Intended for use by Scala 2, to annotate Scala 2 + * libraries. Scala 3 uses the `into` modifier on the parameter + * type instead. 
+ */ +@experimental +class allowConversions extends scala.annotation.StaticAnnotation diff --git a/library/src/scala/annotation/internal/MappedAlternative.scala b/library/src/scala/annotation/internal/MappedAlternative.scala new file mode 100644 index 000000000000..19bd84df633c --- /dev/null +++ b/library/src/scala/annotation/internal/MappedAlternative.scala @@ -0,0 +1,13 @@ +package scala.annotation +package internal + +/** An annotation added by overloading resoluton to mapped symbols that + * explore deeper into the types of the opverloaded alternatives. + * Its tree is a TypeTree with two parameters which are both needed to + * fine default getters in later parameter sections. + * @param Prefix the prefix field of the original alternative TermRef + * @param SkipCount a ConstantType referring to the number of skipped term parameters + * The annotation is short-lived since mapped symbols are discarded immediately + * once an overloading resolution step terminates. + */ +final class MappedAlternative[Prefix, SkipCount] extends Annotation diff --git a/library/src/scala/annotation/internal/CaptureChecked.scala b/library/src/scala/annotation/internal/WithPureFuns.scala similarity index 52% rename from library/src/scala/annotation/internal/CaptureChecked.scala rename to library/src/scala/annotation/internal/WithPureFuns.scala index 8392189f11f7..f0fc45c7f584 100644 --- a/library/src/scala/annotation/internal/CaptureChecked.scala +++ b/library/src/scala/annotation/internal/WithPureFuns.scala @@ -3,7 +3,7 @@ package internal import annotation.experimental /** A marker annotation on a toplevel class that indicates - * that the class was checked under -Ycc + * that the class was typed with the pureFunctions language import. 
*/ -@experimental class CaptureChecked extends StaticAnnotation +@experimental class WithPureFuns extends StaticAnnotation diff --git a/library/src-bootstrapped/scala/internal/requiresCapability.scala b/library/src/scala/annotation/internal/requiresCapability.scala similarity index 100% rename from library/src-bootstrapped/scala/internal/requiresCapability.scala rename to library/src/scala/annotation/internal/requiresCapability.scala diff --git a/library/src-bootstrapped/scala/annotation/retains.scala b/library/src/scala/annotation/retains.scala similarity index 82% rename from library/src-bootstrapped/scala/annotation/retains.scala rename to library/src/scala/annotation/retains.scala index 0d0099de75fb..0387840ea8bd 100644 --- a/library/src-bootstrapped/scala/annotation/retains.scala +++ b/library/src/scala/annotation/retains.scala @@ -11,5 +11,5 @@ package scala.annotation * The annotation can also be written explicitly if one wants to avoid the * non-standard capturing type syntax. */ -@experimental class retains(xs: Any*) extends annotation.StaticAnnotation - +@experimental +class retains(xs: Any*) extends annotation.StaticAnnotation diff --git a/library/src-bootstrapped/scala/annotation/retainsByName.scala b/library/src/scala/annotation/retainsByName.scala similarity index 100% rename from library/src-bootstrapped/scala/annotation/retainsByName.scala rename to library/src/scala/annotation/retainsByName.scala diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala new file mode 100644 index 000000000000..fb1721f98b35 --- /dev/null +++ b/library/src/scala/caps.scala @@ -0,0 +1,32 @@ +package scala + +import annotation.experimental + +@experimental object caps: + + /** The universal capture reference */ + val `*`: Any = () + + object unsafe: + + /** If argument is of type `cs T`, converts to type `box cs T`. This + * avoids the error that would be raised when boxing `*`. 
+ */ + extension [T](x: T) def unsafeBox: T = x + + /** If argument is of type `box cs T`, converts to type `cs T`. This + * avoids the error that would be raised when unboxing `*`. + */ + extension [T](x: T) def unsafeUnbox: T = x + + /** If argument is of type `box cs T`, converts to type `cs T`. This + * avoids the error that would be raised when unboxing `*`. + */ + extension [T, U](f: T => U) def unsafeBoxFunArg: T => U = f + end unsafe + + /** Mixing in this trait forces a trait or class to be pure, i.e. + * have no capabilities retained in its self type. + */ + trait Pure: + this: Pure => diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 3e2863f2260b..edf8aa61b559 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -1,6 +1,7 @@ package scala.quoted import scala.annotation.experimental +import scala.annotation.implicitNotFound import scala.reflect.TypeTest /** Current Quotes in scope @@ -14,14 +15,32 @@ import scala.reflect.TypeTest * } * ``` */ -transparent inline def quotes(using inline q: Quotes): q.type = q +transparent inline def quotes(using q: Quotes): q.type = q /** Quotation context provided by a macro expansion or in the scope of `scala.quoted.staging.run`. * Used to perform all operations on quoted `Expr` or `Type`. * * It contains the low-level Typed AST API metaprogramming API. * This API does not have the static type guarantees that `Expr` and `Type` provide. + * `Quotes` are generated from an enclosing `${ ... }` or `scala.staging.run`. For example: + * ```scala sc:nocompile + * import scala.quoted._ + * inline def myMacro: Expr[T] = + * ${ /* (quotes: Quotes) ?=> */ myExpr } + * def myExpr(using Quotes): Expr[T] = + * '{ f(${ /* (quotes: Quotes) ?=> */ myOtherExpr }) } + * } + * def myOtherExpr(using Quotes): Expr[U] = '{ ... } + * ``` */ + +@implicitNotFound("""explain=Maybe this method is missing a `(using Quotes)` parameter. 
+ +Maybe that splice `$ { ... }` is missing? +Given instances of `Quotes` are generated from an enclosing splice `$ { ... }` (or `scala.staging.run` call). +A splice can be thought as a method with the following signature. + def $[T](body: Quotes ?=> Expr[T]): T +""") trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => // Extension methods for `Expr[T]` @@ -467,9 +486,33 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * otherwise the can be `Term` containing the `New` applied to the parameters of the extended class. * @param body List of members of the class. The members must align with the members of `cls`. */ + // TODO add selfOpt: Option[ValDef]? @experimental def apply(cls: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): ClassDef def copy(original: Tree)(name: String, constr: DefDef, parents: List[Tree /* Term | TypeTree */], selfOpt: Option[ValDef], body: List[Statement]): ClassDef def unapply(cdef: ClassDef): (String, DefDef, List[Tree /* Term | TypeTree */], Option[ValDef], List[Statement]) + + + /** Create the ValDef and ClassDef of a module (equivalent to an `object` declaration in source code). + * + * Equivalent to + * ``` + * def module(module: Symbol, parents: List[Tree], body: List[Statement]): (ValDef, ClassDef) = + * val modCls = module.moduleClass + * val modClassDef = ClassDef(modCls, parents, body) + * val modValDef = ValDef(module, Some(Apply(Select(New(TypeIdent(modCls)), cls.primaryConstructor), Nil))) + * List(modValDef, modClassDef) + * ``` + * + * @param module the module symbol (created using `Symbol.newModule`) + * @param parents parents of the module class + * @param body body of the module class + * @return The module lazy val definition and module class definition. + * These should be added one after the other (in that order) in the body of a class or statements of a block. + * + * @syntax markdown + */ + // TODO add selfOpt: Option[ValDef]? 
+ @experimental def module(module: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): (ValDef, ClassDef) } /** Makes extension methods on `ClassDef` available without any imports */ @@ -2350,7 +2393,16 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is this a given parameter clause `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ def isGiven: Boolean /** Is this a erased parameter clause `(erased x1: X1, ..., xn: Xn)` */ + // TODO:deprecate in 3.4 and stabilize `erasedArgs` and `hasErasedArgs`. + // @deprecated("Use `hasErasedArgs`","3.4") def isErased: Boolean + + /** List of `erased` flags for each parameter of the clause */ + @experimental + def erasedArgs: List[Boolean] + /** Whether the clause has any erased parameters */ + @experimental + def hasErasedArgs: Boolean end TermParamClauseMethods /** A type parameter clause `[X1, ..., Xn]` */ @@ -2626,7 +2678,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => */ def isContextFunctionType: Boolean - /** Is this type an erased function type? + /** Is this type a function type with erased parameters? * * @see `isFunctionType` */ @@ -3119,9 +3171,19 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Extension methods of `MethodType` */ trait MethodTypeMethods: extension (self: MethodType) - /** Is this the type of given parameter clause `(implicit X1, ..., Xn)`, `(given X1, ..., Xn)` or `(given x1: X1, ..., xn: Xn)` */ + /** Is this the type of using parameter clause `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ def isImplicit: Boolean + /** Is this the type of erased parameter clause `(erased x1: X1, ..., xn: Xn)` */ + // TODO:deprecate in 3.4 and stabilize `erasedParams` and `hasErasedParams`. 
+ // @deprecated("Use `hasErasedParams`","3.4") def isErased: Boolean + + /** List of `erased` flags for each parameters of the clause */ + @experimental + def erasedParams: List[Boolean] + /** Whether the clause has any erased parameters */ + @experimental + def hasErasedParams: Boolean def param(idx: Int): TypeRepr end extension end MethodTypeMethods @@ -3638,8 +3700,67 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be * direct or indirect children of the reflection context's owner. */ + // TODO: add flags and privateWithin @experimental def newClass(parent: Symbol, name: String, parents: List[TypeRepr], decls: Symbol => List[Symbol], selfType: Option[TypeRepr]): Symbol + /** Generates a new module symbol with an associated module class symbol, + * this is equivalent to an `object` declaration in source code. + * This method returns the module symbol. The module class can be accessed calling `moduleClass` on this symbol. + * + * Example usage: + * ```scala + * //{ + * given Quotes = ??? + * import quotes.reflect._ + * //} + * val moduleName: String = Symbol.freshName("MyModule") + * val parents = List(TypeTree.of[Object]) + * def decls(cls: Symbol): List[Symbol] = + * List(Symbol.newMethod(cls, "run", MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Unit]), Flags.EmptyFlags, Symbol.noSymbol)) + * + * val mod = Symbol.newModule(Symbol.spliceOwner, moduleName, Flags.EmptyFlags, Flags.EmptyFlags, parents.map(_.tpe), decls, Symbol.noSymbol) + * val cls = mod.moduleClass + * val runSym = cls.declaredMethod("run").head + * + * val runDef = DefDef(runSym, _ => Some('{ println("run") }.asTerm)) + * val modDef = ClassDef.module(mod, parents, body = List(runDef)) + * + * val callRun = Apply(Select(Ref(mod), runSym), Nil) + * + * Block(modDef.toList, callRun) + * ``` + * constructs the equivalent to + * ```scala + * //{ + * given Quotes = ??? 
+ * import quotes.reflect._ + * //} + * '{ + * object MyModule$macro$1 extends Object: + * def run(): Unit = println("run") + * MyModule$macro$1.run() + * } + * ``` + * + * @param parent The owner of the class + * @param name The name of the class + * @param modFlags extra flags with which the module symbol should be constructed + * @param clsFlags extra flags with which the module class symbol should be constructed + * @param parents The parent classes of the class. The first parent must not be a trait. + * @param decls A function that takes the symbol of the module class as input and return the symbols of its declared members + * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. + * + * This symbol starts without an accompanying definition. + * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing + * this symbol to `ClassDef.module`. + * + * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be + * direct or indirect children of the reflection context's owner. + * + * @syntax markdown + */ + @experimental def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol + /** Generates a new method symbol with the given parent, name and type. * * To define a member method of a class, use the `newMethod` within the `decls` function of `newClass`. @@ -3675,7 +3796,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing * this symbol to the ValDef constructor. 
* - * Note: Also see reflect.let + * Note: Also see ValDef.let * * @param parent The owner of the val/var/lazy val * @param name The name of the val/var/lazy val @@ -3704,6 +3825,18 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Definition not available */ def noSymbol: Symbol + + /** A fresh name for class or member symbol names. + * + * Fresh names are constructed using the following format `prefix + "$macro$" + freshIndex`. + * The `freshIndex` are unique within the current source file. + * + * Examples: See `scala.annotation.MacroAnnotation` + * + * @param prefix Prefix of the fresh name + */ + @experimental + def freshName(prefix: String): String } /** Makes extension methods on `Symbol` available without any imports */ @@ -3734,6 +3867,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** The full name of this symbol up to the root package */ def fullName: String + /** Type of the definition */ + @experimental + def info: TypeRepr + /** The position of this symbol */ def pos: Option[Position] @@ -3879,17 +4016,17 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def declaredTypes: List[Symbol] /** Type member with the given name directly declared in the class */ - @deprecated("Use typeMember", "3.1.0") + @deprecated("Use declaredType or typeMember", "3.1.0") def memberType(name: String): Symbol - /** Type member with the given name directly declared in the class */ + /** Type member with the given name declared or inherited in the class */ def typeMember(name: String): Symbol /** Type member directly declared in the class */ - @deprecated("Use typeMembers", "3.1.0") + @deprecated("Use declaredTypes or typeMembers", "3.1.0") def memberTypes: List[Symbol] - /** Type member directly declared in the class */ + /** Type member directly declared or inherited in the class */ def typeMembers: List[Symbol] /** All members directly declared in the class */ @@ -4155,8 +4292,31 @@ trait 
Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * - ... * - Nth element is `FunctionN` */ + // TODO: deprecate in 3.4 and stabilize FunctionClass(Int)/FunctionClass(Int,Boolean) + // @deprecated("Use overload of `FunctionClass` with 1 or 2 arguments","3.4") def FunctionClass(arity: Int, isImplicit: Boolean = false, isErased: Boolean = false): Symbol + /** Class symbol of a function class `scala.FunctionN`. + * + * @param arity the arity of the function where `0 <= arity` + * @return class symbol of `scala.FunctionN` where `N == arity` + */ + @experimental + def FunctionClass(arity: Int): Symbol + + /** Class symbol of a context function class `scala.FunctionN` or `scala.ContextFunctionN`. + * + * @param arity the arity of the function where `0 <= arity` + * @param isContextual if it is a `scala.ContextFunctionN` + * @return class symbol of `scala.FunctionN` or `scala.ContextFunctionN` where `N == arity` + */ + @experimental + def FunctionClass(arity: Int, isContextual: Boolean): Symbol + + /** The `scala.runtime.ErasedFunction` built-in trait. */ + @experimental + def ErasedFunctionClass: Symbol + /** Function-like object that maps arity to symbols for classes `scala.TupleX`. * - 0th element is `NoSymbol` * - 1st element is `NoSymbol` @@ -4201,7 +4361,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => // FLAGS // /////////////// - /** FlagSet of a Symbol */ + /** Flags of a Symbol */ type Flags /** Module object of `type Flags` */ @@ -4278,6 +4438,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is implemented as a Java static */ def JavaStatic: Flags + /** Is this an annotation defined in Java */ + @experimental def JavaAnnotation: Flags + /** Is this symbol `lazy` */ def Lazy: Flags @@ -4336,7 +4499,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def StableRealizable: Flags /** Is this symbol marked as static. 
Mapped to static Java member */ - def Static: Flags + @deprecated("Use JavaStatic instead", "3.3.0") def Static: Flags /** Is this symbol to be tagged Java Synthetic */ def Synthetic: Flags @@ -4370,6 +4533,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => end extension } + /////////////// // POSITIONS // /////////////// @@ -4748,7 +4912,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => case self: ValDef => self } val body = tree.body.map(transformStatement(_)(tree.symbol)) - ClassDef.copy(tree)(tree.name, constructor.asInstanceOf[DefDef], parents, self, body) // cast as workaround for lampepfl/dotty#14821. TODO remove when referenceVersion >= 3.2.0-RC1 + ClassDef.copy(tree)(tree.name, constructor, parents, self, body) case tree: Import => Import.copy(tree)(transformTerm(tree.expr)(owner), tree.selectors) case tree: Export => diff --git a/library/src/scala/quoted/runtime/QuoteMatching.scala b/library/src/scala/quoted/runtime/QuoteMatching.scala index 2a76143e9868..c95ffe87b5dc 100644 --- a/library/src/scala/quoted/runtime/QuoteMatching.scala +++ b/library/src/scala/quoted/runtime/QuoteMatching.scala @@ -17,7 +17,7 @@ trait QuoteMatching: * - `ExprMatch.unapply('{ f(0, myInt) })('{ f(patternHole[Int], patternHole[Int]) }, _)` * will return `Some(Tuple2('{0}, '{ myInt }))` * - `ExprMatch.unapply('{ f(0, "abc") })('{ f(0, patternHole[Int]) }, _)` - * will return `None` due to the missmatch of types in the hole + * will return `None` due to the mismatch of types in the hole * * Holes: * - scala.quoted.runtime.Patterns.patternHole[T]: hole that matches an expression `x` of type `Expr[U]` @@ -27,7 +27,7 @@ trait QuoteMatching: * @param pattern `Expr[Any]` containing the pattern tree * @return None if it did not match, `Some(tup)` if it matched where `tup` contains `Expr[Ti]`` */ - def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: Expr[Any])(using pattern: Expr[Any]): Option[Tup] + def unapply[TypeBindings, Tup 
<: Tuple](scrutinee: Expr[Any])(using pattern: Expr[Any]): Option[Tup] } val TypeMatch: TypeMatchModule @@ -40,5 +40,10 @@ trait QuoteMatching: * @param pattern `Type[?]` containing the pattern tree * @return None if it did not match, `Some(tup)` if it matched where `tup` contains `Type[Ti]`` */ - def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: Type[?])(using pattern: Type[?]): Option[Tup] + def unapply[TypeBindings, Tup <: Tuple](scrutinee: Type[?])(using pattern: Type[?]): Option[Tup] } + +object QuoteMatching: + type KList + type KCons[+H <: AnyKind, +T <: KList] <: KList + type KNil <: KList diff --git a/library/src/scala/runtime/ErasedFunction.scala b/library/src/scala/runtime/ErasedFunction.scala new file mode 100644 index 000000000000..7e9211bba75a --- /dev/null +++ b/library/src/scala/runtime/ErasedFunction.scala @@ -0,0 +1,11 @@ +package scala.runtime + +import scala.annotation.experimental + +/** Marker trait for function types with erased parameters. + * + * This trait will be refined with an `apply` method with erased parameters: + * ErasedFunction { def apply([erased] x_1: P_1, ..., [erased] x_N: P_N): R } + * This type will be erased to FunctionL, where L = N - count(erased). 
+ */ +@experimental trait ErasedFunction diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index 7a66639a826a..0edbe0e748f4 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -1,5 +1,7 @@ package scala.runtime +import java.util.concurrent.CountDownLatch + import scala.annotation.* /** @@ -7,24 +9,27 @@ import scala.annotation.* */ object LazyVals { @nowarn - private[this] val unsafe: sun.misc.Unsafe = - classOf[sun.misc.Unsafe].getDeclaredFields.nn.find { field => - field.nn.getType == classOf[sun.misc.Unsafe] && { - field.nn.setAccessible(true) - true - } - } - .map(_.nn.get(null).asInstanceOf[sun.misc.Unsafe]) - .getOrElse { - throw new ExceptionInInitializerError { - new IllegalStateException("Can't find instance of sun.misc.Unsafe") - } - } + private[this] val unsafe: sun.misc.Unsafe = { + def throwInitializationException() = + throw new ExceptionInInitializerError( + new IllegalStateException("Can't find instance of sun.misc.Unsafe") + ) + try + val unsafeField = classOf[sun.misc.Unsafe].getDeclaredField("theUnsafe").nn + if unsafeField.getType == classOf[sun.misc.Unsafe] then + unsafeField.setAccessible(true) + unsafeField.get(null).asInstanceOf[sun.misc.Unsafe] + else + throwInitializationException() + catch case _: NoSuchFieldException => + throwInitializationException() + } private[this] val base: Int = { val processors = java.lang.Runtime.getRuntime.nn.availableProcessors() 8 * processors * processors } + private[this] val monitors: Array[Object] = Array.tabulate(base)(_ => new Object) @@ -40,6 +45,27 @@ object LazyVals { /* ------------- Start of public API ------------- */ + // This trait extends Serializable to fix #16806 that caused a race condition + sealed trait LazyValControlState extends Serializable + + /** + * Used to indicate the state of a lazy val that is being + * evaluated and of which other threads await the result. 
+ */ + final class Waiting extends CountDownLatch(1) with LazyValControlState + + /** + * Used to indicate the state of a lazy val that is currently being + * evaluated with no other thread awaiting its result. + */ + object Evaluating extends LazyValControlState + + /** + * Used to indicate the state of a lazy val that has been evaluated to + * `null`. + */ + object NullValue extends LazyValControlState + final val BITS_PER_LAZY_VAL = 2L def STATE(cur: Long, ord: Int): Long = { @@ -57,6 +83,12 @@ object LazyVals { unsafe.compareAndSwapLong(t, offset, e, n) } + def objCAS(t: Object, offset: Long, exp: Object, n: Object): Boolean = { + if (debug) + println(s"objCAS($t, $exp, $n)") + unsafe.compareAndSwapObject(t, offset, exp, n) + } + def setFlag(t: Object, offset: Long, v: Int, ord: Int): Unit = { if (debug) println(s"setFlag($t, $offset, $v, $ord)") @@ -102,6 +134,7 @@ object LazyVals { unsafe.getLongVolatile(t, off) } + // kept for backward compatibility def getOffset(clz: Class[_], name: String): Long = { @nowarn val r = unsafe.objectFieldOffset(clz.getDeclaredField(name)) @@ -110,6 +143,14 @@ object LazyVals { r } + def getStaticFieldOffset(field: java.lang.reflect.Field): Long = { + @nowarn + val r = unsafe.staticFieldOffset(field) + if (debug) + println(s"getStaticFieldOffset(${field.getDeclaringClass}, ${field.getName}) = $r") + r + } + def getOffsetStatic(field: java.lang.reflect.Field) = @nowarn val r = unsafe.objectFieldOffset(field) diff --git a/library/src/scala/runtime/coverage/Invoker.scala b/library/src/scala/runtime/coverage/Invoker.scala index ee37a477cbe6..c35c6c2ec7df 100644 --- a/library/src/scala/runtime/coverage/Invoker.scala +++ b/library/src/scala/runtime/coverage/Invoker.scala @@ -1,11 +1,11 @@ package scala.runtime.coverage -import scala.collection.mutable.{BitSet, AnyRefMap} +import scala.annotation.internal.sharable +import scala.annotation.nowarn import scala.collection.concurrent.TrieMap +import scala.collection.mutable.{BitSet, 
AnyRefMap} +import java.io.{File, FileWriter} import java.nio.file.Files -import java.io.FileWriter -import java.io.File -import scala.annotation.internal.sharable @sharable // avoids false positive by -Ycheck-reentrant object Invoker { @@ -48,6 +48,7 @@ object Invoker { writer.write('\n') writer.flush() + @nowarn("cat=deprecation") def measurementFile(dataDir: String): File = new File( dataDir, MeasurementsPrefix + runtimeUUID + "." + Thread.currentThread.nn.getId diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 3b7d009ff6f3..09feaf11c31d 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -31,7 +31,7 @@ object Predef: * @tparam T the type of the value to be summoned * @return the given value typed: the provided type parameter */ - transparent inline def summon[T](using inline x: T): x.type = x + transparent inline def summon[T](using x: T): x.type = x // Extension methods for working with explicit nulls diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 2be4861b4cc2..d92495c6f5aa 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -51,6 +51,7 @@ object language: /** Experimental support for using indentation for arguments */ @compileTimeOnly("`fewerBraces` can only be used at compile time in import statements") + @deprecated("`fewerBraces` is now standard, no language import is needed", since = "3.3") object fewerBraces /** Experimental support for typechecked exception capabilities @@ -60,6 +61,35 @@ object language: @compileTimeOnly("`saferExceptions` can only be used at compile time in import statements") object saferExceptions + /** Adds support for clause interleaving: + * Methods can now have as many type clauses as they like, this allows to have 
type bounds depend on terms: `def f(x: Int)[A <: x.type]: A` + * + * @see [[http://dotty.epfl.ch/docs/reference/other-new-features/explicit-nulls.html]] + */ + @compileTimeOnly("`clauseInterleaving` can only be used at compile time in import statements") + object clauseInterleaving + + /** Experimental support for pure function type syntax + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/purefuns]] + */ + @compileTimeOnly("`pureFunctions` can only be used at compile time in import statements") + object pureFunctions + + /** Experimental support for capture checking; implies support for pureFunctions + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/cc]] + */ + @compileTimeOnly("`captureChecking` can only be used at compile time in import statements") + object captureChecking + + /** Experimental support for automatic conversions of arguments, without requiring + * a langauge import `import scala.language.implicitConversions`. + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] + */ + @compileTimeOnly("`into` can only be used at compile time in import statements") + object into end experimental /** The deprecated object contains features that are no longer officially suypported in Scala. @@ -179,7 +209,6 @@ object language: @compileTimeOnly("`3.2` can only be used at compile time in import statements") object `3.2` -/* This can be added when we go to 3.3 /** Set source version to 3.3-migration. 
* * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] @@ -193,5 +222,5 @@ object language: */ @compileTimeOnly("`3.3` can only be used at compile time in import statements") object `3.3` -*/ + end language diff --git a/library/src/scala/util/NotGiven.scala b/library/src/scala/util/NotGiven.scala index 99cc903d4426..973e709042cb 100644 --- a/library/src/scala/util/NotGiven.scala +++ b/library/src/scala/util/NotGiven.scala @@ -31,11 +31,13 @@ trait LowPriorityNotGiven { } object NotGiven extends LowPriorityNotGiven { + private val cachedValue = new NotGiven[Nothing]() + /** A value of type `NotGiven` to signal a successful search for `NotGiven[C]` (i.e. a failing * search for `C`). A reference to this value will be explicitly constructed by Dotty's * implicit search algorithm */ - def value: NotGiven[Nothing] = new NotGiven[Nothing]() + def value: NotGiven[Nothing] = cachedValue /** One of two ambiguous methods used to emulate negation in Scala 2 */ given amb1[T](using ev: T): NotGiven[T] = ??? diff --git a/library/src/scala/util/boundary.scala b/library/src/scala/util/boundary.scala new file mode 100644 index 000000000000..2edd754bbb93 --- /dev/null +++ b/library/src/scala/util/boundary.scala @@ -0,0 +1,64 @@ +package scala.util +import scala.annotation.implicitNotFound + +/** A boundary that can be exited by `break` calls. + * `boundary` and `break` represent a unified and superior alternative for the + * `scala.util.control.NonLocalReturns` and `scala.util.control.Breaks` APIs. + * The main differences are: + * + * - Unified names: `boundary` to establish a scope, `break` to leave it. + * `break` can optionally return a value. + * - Integration with exceptions. `break`s are logically non-fatal exceptions. + * The `Break` exception class extends `RuntimeException` and is optimized so + * that stack trace generation is suppressed. 
+ * - Better performance: breaks to enclosing scopes in the same method can + * be rewritten to jumps. + * + * Example usage: + * + * import scala.util.boundary, boundary.break + * + * def firstIndex[T](xs: List[T], elem: T): Int = + * boundary: + * for (x, i) <- xs.zipWithIndex do + * if x == elem then break(i) + * -1 + */ +object boundary: + + /** User code should call `break.apply` instead of throwing this exception + * directly. + */ + final class Break[T] private[boundary](val label: Label[T], val value: T) + extends RuntimeException( + /*message*/ null, /*cause*/ null, /*enableSuppression=*/ false, /*writableStackTrace*/ false) + + /** Labels are targets indicating which boundary will be exited by a `break`. + */ + @implicitNotFound("explain=A Label is generated from an enclosing `scala.util.boundary` call.\nMaybe that boundary is missing?") + final class Label[-T] + + /** Abort current computation and instead return `value` as the value of + * the enclosing `boundary` call that created `label`. + */ + def break[T](value: T)(using label: Label[T]): Nothing = + throw Break(label, value) + + /** Abort current computation and instead continue after the `boundary` call that + * created `label`. + */ + def break()(using label: Label[Unit]): Nothing = + throw Break(label, ()) + + /** Run `body` with freshly generated label as implicit argument. Catch any + * breaks associated with that label and return their results instead of + * `body`'s result. 
+ */ + inline def apply[T](inline body: Label[T] ?=> T): T = + val local = Label[T]() + try body(using local) + catch case ex: Break[T] @unchecked => + if ex.label eq local then ex.value + else throw ex + +end boundary diff --git a/library/src/scala/util/control/NonLocalReturns.scala b/library/src/scala/util/control/NonLocalReturns.scala index c32e0ff16457..ad4dc05f36ac 100644 --- a/library/src/scala/util/control/NonLocalReturns.scala +++ b/library/src/scala/util/control/NonLocalReturns.scala @@ -7,8 +7,19 @@ package scala.util.control * import scala.util.control.NonLocalReturns.* * * returning { ... throwReturn(x) ... } + * + * This API has been deprecated. Its functionality is better served by + * + * - `scala.util.boundary` in place of `returning` + * - `scala.util.break` in place of `throwReturn` + * + * The new abstractions work with plain `RuntimeExceptions` and are more + * performant, since returns within the scope of the same method can be + * rewritten by the compiler to jumps. */ +@deprecated("Use scala.util.boundary instead", "3.3") object NonLocalReturns { + @deprecated("Use scala.util.boundary.Break instead", "3.3") class ReturnThrowable[T] extends ControlThrowable { private var myResult: T = _ def throwReturn(result: T): Nothing = { @@ -19,10 +30,12 @@ object NonLocalReturns { } /** Performs a nonlocal return by throwing an exception. */ + @deprecated("Use scala.util.boundary.break instead", "3.3") def throwReturn[T](result: T)(using returner: ReturnThrowable[? >: T]): Nothing = returner.throwReturn(result) /** Enable nonlocal returns in `op`. 
*/ + @deprecated("Use scala.util.boundary instead", "3.3") def returning[T](op: ReturnThrowable[T] ?=> T): T = { val returner = new ReturnThrowable[T] try op(using returner) diff --git a/project/Build.scala b/project/Build.scala index f79b66c8bf3f..82b7fee64879 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -80,9 +80,9 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.2.1-RC1" + val referenceVersion = "3.3.0-RC3" - val baseVersion = "3.2.2-RC1" + val baseVersion = "3.3.1-RC1" // Versions used by the vscode extension to create a new project // This should be the latest published releases. @@ -98,7 +98,7 @@ object Build { * set to 3.1.3. If it is going to be 3.1.0, it must be set to the latest * 3.0.x release. */ - val previousDottyVersion = "3.2.0" + val previousDottyVersion = "3.2.2" object CompatMode { final val BinaryCompatible = 0 @@ -120,8 +120,8 @@ object Build { * scala-library. */ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.8" - case Bootstrapped => "2.13.8" + case NonBootstrapped => "2.13.10" + case Bootstrapped => "2.13.10" } val dottyOrganization = "org.scala-lang" @@ -360,6 +360,7 @@ object Build { // Settings used when compiling dotty with a non-bootstrapped dotty lazy val commonBootstrappedSettings = commonDottySettings ++ NoBloopExport.settings ++ Seq( + // To enable support of scaladoc and language-server projects you need to change this to true and use sbt as your build server bspEnabled := false, (Compile / unmanagedSourceDirectories) += baseDirectory.value / "src-bootstrapped", @@ -489,7 +490,8 @@ object Build { settings(commonJavaSettings). settings(commonMiMaSettings). 
settings( - versionScheme := Some("semver-spec") + versionScheme := Some("semver-spec"), + mimaBinaryIssueFilters ++= MiMaFilters.Interfaces ) /** Find an artifact with the given `name` in `classpath` */ @@ -545,7 +547,7 @@ object Build { // get libraries onboard libraryDependencies ++= Seq( - "org.scala-lang.modules" % "scala-asm" % "9.3.0-scala-1", // used by the backend + "org.scala-lang.modules" % "scala-asm" % "9.4.0-scala-1", // used by the backend Dependencies.oldCompilerInterface, // we stick to the old version to avoid deprecation warnings "org.jline" % "jline-reader" % "3.19.0", // used by the REPL "org.jline" % "jline-terminal" % "3.19.0", @@ -607,7 +609,7 @@ object Build { if (args.contains("--help")) { println( s""" - |usage: testCompilation [--help] [--from-tasty] [--update-checkfiles] [] + |usage: testCompilation [--help] [--from-tasty] [--update-checkfiles] [--failed] [] | |By default runs tests in dotty.tools.dotc.*CompilationTests and dotty.tools.dotc.coverage.*, |excluding tests tagged with dotty.SlowTests. 
@@ -615,6 +617,7 @@ object Build { | --help show this message | --from-tasty runs tests in dotty.tools.dotc.FromTastyTests | --update-checkfiles override the checkfiles that did not match with the current output + | --failed re-run only failed tests | substring of the path of the tests file | """.stripMargin @@ -623,11 +626,13 @@ object Build { } else { val updateCheckfile = args.contains("--update-checkfiles") + val rerunFailed = args.contains("--failed") val fromTasty = args.contains("--from-tasty") - val args1 = if (updateCheckfile | fromTasty) args.filter(x => x != "--update-checkfiles" && x != "--from-tasty") else args + val args1 = if (updateCheckfile | fromTasty | rerunFailed) args.filter(x => x != "--update-checkfiles" && x != "--from-tasty" && x != "--failed") else args val test = if (fromTasty) "dotty.tools.dotc.FromTastyTests" else "dotty.tools.dotc.*CompilationTests dotty.tools.dotc.coverage.*" val cmd = s" $test -- --exclude-categories=dotty.SlowTests" + (if (updateCheckfile) " -Ddotty.tests.updateCheckfiles=TRUE" else "") + + (if (rerunFailed) " -Ddotty.tests.rerunFailed=TRUE" else "") + (if (args1.nonEmpty) " -Ddotty.tests.filter=" + args1.mkString(" ") else "") (Test / testOnly).toTask(cmd) } @@ -839,6 +844,7 @@ object Build { "-sourcepath", (Compile / sourceDirectories).value.map(_.getAbsolutePath).distinct.mkString(File.pathSeparator), "-Yexplicit-nulls", ), + (Compile / doc / scalacOptions) ++= ScaladocConfigs.DefaultGenerationSettings.value.settings ) lazy val `scala3-library` = project.in(file("library")).asDottyLibrary(NonBootstrapped) @@ -1051,15 +1057,13 @@ object Build { // with the bootstrapped library on the classpath. lazy val `scala3-sbt-bridge-tests` = project.in(file("sbt-bridge/test")). dependsOn(dottyCompiler(Bootstrapped) % Test). + dependsOn(`scala3-sbt-bridge`). settings(commonBootstrappedSettings). 
settings( Compile / sources := Seq(), Test / scalaSource := baseDirectory.value, Test / javaSource := baseDirectory.value, - - // Tests disabled until zinc-api-info cross-compiles with 2.13, - // alternatively we could just copy in sources the part of zinc-api-info we need. - Test / sources := Seq() + libraryDependencies += ("org.scala-sbt" %% "zinc-apiinfo" % "1.8.0" % Test).cross(CrossVersion.for3Use2_13) ) lazy val `scala3-language-server` = project.in(file("language-server")). @@ -1127,6 +1131,7 @@ object Build { enablePlugins(DottyJSPlugin). dependsOn(`scala3-library-bootstrappedJS`). settings( + bspEnabled := false, scalacOptions --= Seq("-Xfatal-warnings", "-deprecation"), // Required to run Scala.js tests. @@ -1189,6 +1194,9 @@ object Build { "isFullOpt" -> (stage == FullOptStage), "compliantAsInstanceOfs" -> (sems.asInstanceOfs == CheckedBehavior.Compliant), "compliantArrayIndexOutOfBounds" -> (sems.arrayIndexOutOfBounds == CheckedBehavior.Compliant), + "compliantArrayStores" -> (sems.arrayStores == CheckedBehavior.Compliant), + "compliantNegativeArraySizes" -> (sems.negativeArraySizes == CheckedBehavior.Compliant), + "compliantStringIndexOutOfBounds" -> (sems.stringIndexOutOfBounds == CheckedBehavior.Compliant), "compliantModuleInit" -> (sems.moduleInit == CheckedBehavior.Compliant), "strictFloats" -> sems.strictFloats, "productionMode" -> sems.productionMode, @@ -1265,6 +1273,14 @@ object Build { ) }, + /* For some reason, in Scala 3, the implementation of IterableDefaultTest + * resolves to `scala.collection.ArrayOps.ArrayIterator`, whose `next()` + * method is not compliant when called past the last element on Scala.js. + * It relies on catching an `ArrayIndexOutOfBoundsException`. + * We have to ignore it here. 
+ */ + Test / testOptions := Seq(Tests.Filter(_ != "org.scalajs.testsuite.javalib.lang.IterableDefaultTest")), + Test / managedResources ++= { val testDir = fetchScalaJSSource.value / "test-suite/js/src/test" @@ -1300,6 +1316,7 @@ object Build { Seq( "-Ddotty.tests.classes.dottyLibraryJS=" + dottyLibraryJSJar, + "-Ddotty.tests.classes.scalaJSJavalib=" + findArtifactPath(externalJSDeps, "scalajs-javalib"), "-Ddotty.tests.classes.scalaJSLibrary=" + findArtifactPath(externalJSDeps, "scalajs-library_2.13"), ) }, @@ -1319,6 +1336,7 @@ object Build { val generateSelfDocumentation = taskKey[Unit]("Generate example documentation") // Note: the two tasks below should be one, but a bug in Tasty prevents that val generateScalaDocumentation = inputKey[Unit]("Generate documentation for dotty lib") + val generateStableScala3Documentation = inputKey[Unit]("Generate documentation for stable dotty lib") val generateTestcasesDocumentation = taskKey[Unit]("Generate documentation for testcases, usefull for debugging tests") val generateReferenceDocumentation = inputKey[Unit]("Generate language reference documentation for Scala 3") @@ -1462,6 +1480,12 @@ object Build { writeAdditionalFiles.dependsOn(generateDocumentation(config)) }.evaluated, + generateStableScala3Documentation := Def.inputTaskDyn { + val extraArgs = spaceDelimited("").parsed + val config = stableScala3(extraArgs.head) + generateDocumentation(config) + }.evaluated, + generateTestcasesDocumentation := Def.taskDyn { generateDocumentation(Testcases) }.value, @@ -1498,7 +1522,7 @@ object Build { .add(OutputDir("scaladoc/output/reference")) .add(SiteRoot(s"${temp.getAbsolutePath}/docs")) .add(ProjectName("Scala 3 Reference")) - .add(ProjectVersion("3.1.3")) // TODO: Change that later to the current version tag. 
(This must happen on first forward this branch to stable release tag) + .add(ProjectVersion(baseVersion)) .remove[VersionsDictionaryUrl] .add(SourceLinks(List( s"${temp.getAbsolutePath}=github://lampepfl/dotty/language-reference-stable" @@ -1813,9 +1837,10 @@ object Build { settings(disableDocSetting). settings( versionScheme := Some("semver-spec"), - if (mode == Bootstrapped) { - commonMiMaSettings - } else { + if (mode == Bootstrapped) Def.settings( + commonMiMaSettings, + mimaBinaryIssueFilters ++= MiMaFilters.TastyCore, + ) else { Nil } ) @@ -1857,22 +1882,21 @@ object ScaladocConfigs { case None => s"${sourcesPrefix}github://lampepfl/dotty/$v$outputPrefix" } - lazy val DefaultGenerationConfig = Def.task { - def distLocation = (dist / pack).value - def projectVersion = version.value + def defaultSourceLinks(version: String = dottyNonBootstrappedVersion, refVersion: String = dottyVersion) = Def.task { def stdLibVersion = stdlibVersion(NonBootstrapped) - def scalaLib = findArtifactPath(externalCompilerClasspathTask.value, "scala-library") - def dottyLib = (`scala3-library` / Compile / classDirectory).value def srcManaged(v: String, s: String) = s"out/bootstrap/stdlib-bootstrapped/scala-$v/src_managed/main/$s-library-src" - - def defaultSourceLinks: SourceLinks = SourceLinks( + SourceLinks( List( - scalaSrcLink(stdLibVersion, srcManaged(dottyNonBootstrappedVersion, "scala") + "="), - dottySrcLink(referenceVersion, srcManaged(dottyNonBootstrappedVersion, "dotty") + "=", "#library/src"), - dottySrcLink(referenceVersion), + scalaSrcLink(stdLibVersion, srcManaged(version, "scala") + "="), + dottySrcLink(refVersion, srcManaged(version, "dotty") + "=", "#library/src"), + dottySrcLink(refVersion), "docs=github://lampepfl/dotty/main#docs" ) ) + } + + lazy val DefaultGenerationSettings = Def.task { + def projectVersion = version.value def socialLinks = SocialLinks(List( "github::https://github.com/lampepfl/dotty", "discord::https://discord.com/invite/scala", @@ 
-1890,7 +1914,7 @@ object ScaladocConfigs { List(), ProjectVersion(projectVersion), GenerateInkuire(true), - defaultSourceLinks, + defaultSourceLinks().value, skipByRegex, skipById, projectLogo, @@ -1901,17 +1925,22 @@ object ScaladocConfigs { Groups(true), QuickLinks( List( - "Download::https://www.scala-lang.org/download/", - "Documentation::https://docs.scala-lang.org/", - "Libraries::https://index.scala-lang.org", - "Contribute::https://www.scala-lang.org/contribute/", + "Learn::https://docs.scala-lang.org/", + "Install::https://www.scala-lang.org/download/", + "Playground::https://scastie.scala-lang.org", + "Find\u00A0A\u00A0Library::https://index.scala-lang.org", + "Community::https://www.scala-lang.org/community/", "Blog::https://www.scala-lang.org/blog/", - "Community::https://www.scala-lang.org/community/" ) ) ) } + lazy val DefaultGenerationConfig = Def.task { + def distLocation = (dist / pack).value + DefaultGenerationSettings.value + } + lazy val Scaladoc = Def.task { DefaultGenerationConfig.value .add(UseJavacp(true)) @@ -1981,4 +2010,31 @@ object ScaladocConfigs { .add(ApiSubdirectory(true)) .withTargets(roots) } + + def stableScala3(version: String) = Def.task { + Scala3.value + .add(defaultSourceLinks(version + "-bin-SNAPSHOT-nonbootstrapped", version).value) + .add(ProjectVersion(version)) + .add(SnippetCompiler( + List( + s"out/bootstrap/stdlib-bootstrapped/scala-$version-bin-SNAPSHOT-nonbootstrapped/src_managed/main/dotty-library-src/scala/quoted=compile", + s"out/bootstrap/stdlib-bootstrapped/scala-$version-bin-SNAPSHOT-nonbootstrapped/src_managed/main/dotty-library-src/scala/compiletime=compile" + ) + )) + .add(CommentSyntax(List( + s"out/bootstrap/stdlib-bootstrapped/scala-$version-bin-SNAPSHOT-nonbootstrapped/src_managed/main/dotty-library-src=markdown", + s"out/bootstrap/stdlib-bootstrapped/scala-$version-bin-SNAPSHOT-nonbootstrapped/src_managed/main/scala-library-src=wiki", + "wiki" + ))) + 
.add(DocRootContent(s"out/bootstrap/stdlib-bootstrapped/scala-$version-bin-SNAPSHOT-nonbootstrapped/src_managed/main/scala-library-src/rootdoc.txt")) + .withTargets( + Seq( + s"out/bootstrap/stdlib-bootstrapped/scala-$version-bin-SNAPSHOT-nonbootstrapped/classes", + s"tmp/interfaces/target/classes", + s"out/bootstrap/tasty-core-bootstrapped/scala-$version-bin-SNAPSHOT-nonbootstrapped/classes" + ) + ) + .remove[SiteRoot] + .remove[ApiSubdirectory] + } } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 3708ec528c79..1dbf732a5b6e 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -25,6 +25,6 @@ object Dependencies { "com.vladsch.flexmark" % "flexmark-ext-yaml-front-matter" % flexmarkVersion, ) - val newCompilerInterface = "org.scala-sbt" % "compiler-interface" % "1.7.1" + val newCompilerInterface = "org.scala-sbt" % "compiler-interface" % "1.8.0" val oldCompilerInterface = "org.scala-sbt" % "compiler-interface" % "1.3.5" } diff --git a/project/DocumentationWebsite.scala b/project/DocumentationWebsite.scala index 778c70ad2f0d..e24917a60803 100644 --- a/project/DocumentationWebsite.scala +++ b/project/DocumentationWebsite.scala @@ -14,7 +14,7 @@ object DocumentationWebsite { val contributorsTestcasesDestinationFile = Paths.get("scaladoc-testcases", "docs", "_assets", "js", "contributors.js").toFile - val contributorsDestinationFile = Paths.get("docs", "_assets", "js", "contributors.js").toFile + val contributorsDestinationFile = baseDest / "dotty_res" / "scripts" / "contributors.js" sbt.IO.copyFile(contributorsFile, contributorsTestcasesDestinationFile) sbt.IO.copyFile(contributorsFile, contributorsDestinationFile) @@ -25,8 +25,8 @@ object DocumentationWebsite { val cssCodeSnippetsSourceFile = cssSourceFileBase / "code-snippets.css" sbt.IO.copyFile(cssCodeSnippetsSourceFile, cssCodeSnippetsDesitnationFile) - val cssContentContributorsTestcasesDesitnationFile = Paths.get("docs", "_assets", "css", 
"content-contributors.css").toFile - val cssContentContributorsDesitnationFile = Paths.get("scaladoc-testcases", "docs", "_assets", "css", "content-contributors.css").toFile + val cssContentContributorsTestcasesDesitnationFile = Paths.get("scaladoc-testcases", "docs", "_assets", "css", "content-contributors.css").toFile + val cssContentContributorsDesitnationFile = baseDest / "dotty_res" / "styles" / "content-contributors.css" val cssContentContributorsSourceFile = cssContentContributorsSourceBaseFile / "content-contributors.css" sbt.IO.copyFile(cssContentContributorsSourceFile, cssContentContributorsTestcasesDesitnationFile) sbt.IO.copyFile(cssContentContributorsSourceFile, cssContentContributorsDesitnationFile) @@ -42,7 +42,7 @@ object DocumentationWebsite { import _root_.scala.concurrent._ import _root_.scala.concurrent.duration.Duration import ExecutionContext.Implicits.global - val inkuireVersion = "1.0.0-M3" + val inkuireVersion = "v1.0.0-M7" val inkuireLink = s"https://github.com/VirtusLab/Inkuire/releases/download/$inkuireVersion/inkuire.js" val inkuireDestinationFile = baseDest / "dotty_res" / "scripts" / "inkuire.js" sbt.IO.touch(inkuireDestinationFile) diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index ac68190d441d..cb15d82affb8 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -3,5 +3,45 @@ import com.typesafe.tools.mima.core._ object MiMaFilters { val Library: Seq[ProblemFilter] = Seq( + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.caps.unsafeBox"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.caps.unsafeUnbox"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.CanEqual.canEqualMap"), + ProblemFilters.exclude[MissingClassProblem]("scala.caps$Pure"), + ProblemFilters.exclude[MissingClassProblem]("scala.caps$unsafe$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.3-migration"), + 
ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.3"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E3$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E3$minusmigration$"), + ProblemFilters.exclude[MissingClassProblem]("scala.util.boundary"), + ProblemFilters.exclude[MissingClassProblem]("scala.util.boundary$"), + ProblemFilters.exclude[MissingClassProblem]("scala.util.boundary$Break"), + ProblemFilters.exclude[MissingClassProblem]("scala.util.boundary$Label"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.runtime.QuoteMatching$"), + + // Scala.js only: new runtime support class in 3.2.3; not available to users + ProblemFilters.exclude[MissingClassProblem]("scala.scalajs.runtime.AnonFunctionXXL"), + + // New experimental features in 3.3.X + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.clauseInterleaving"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$clauseInterleaving$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.into"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$into$"), + // end of New experimental features in 3.3.X + + // Added java.io.Serializable as LazyValControlState supertype + ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyVals$LazyValControlState"), + ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyVals$Waiting"), + + ) + val TastyCore: Seq[ProblemFilter] = Seq( + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyBuffer.reset"), + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.APPLYsigpoly"), + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyHash.pjwHash64"), + 
ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.util.Util.dble") + ) + val Interfaces: Seq[ProblemFilter] = Seq( + ProblemFilters.exclude[ReversedMissingMethodProblem]("dotty.tools.dotc.interfaces.Diagnostic.diagnosticRelatedInformation"), + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.dotc.interfaces.Diagnostic.diagnosticRelatedInformation"), + ProblemFilters.exclude[MissingClassProblem]("dotty.tools.dotc.interfaces.DiagnosticRelatedInformation") ) } diff --git a/project/ScaladocGeneration.scala b/project/ScaladocGeneration.scala index c6c4393c071f..fd972311da1d 100644 --- a/project/ScaladocGeneration.scala +++ b/project/ScaladocGeneration.scala @@ -141,6 +141,7 @@ object ScaladocGeneration { def remove[T <: Arg[_]: ClassTag]: GenerationConfig def withTargets(targets: Seq[String]): GenerationConfig def serialize: String + def settings: Seq[String] } object GenerationConfig { @@ -173,6 +174,9 @@ object ScaladocGeneration { ++ targets ).mkString(" ") + override def settings: Seq[String] = + args.map(_.serialize) ++ targets + private def argsWithout[T <: Arg[_]]( implicit tag: ClassTag[T] ): (Option[T], Seq[Arg[_]]) = args.foldLeft[(Option[T], Seq[Arg[_]])]((None, Seq.empty)) { diff --git a/project/build.properties b/project/build.properties index 22af2628c413..8b9a0b0ab037 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.1 +sbt.version=1.8.0 diff --git a/project/build.sbt b/project/build.sbt index e19492c42022..188dfa5c6702 100644 --- a/project/build.sbt +++ b/project/build.sbt @@ -1,7 +1,4 @@ // Used by VersionUtil to get gitHash and commitDate libraryDependencies += "org.eclipse.jgit" % "org.eclipse.jgit" % "4.11.0.201803080745-r" - -Compile / unmanagedSourceDirectories += - baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config" libraryDependencies += Dependencies.`jackson-databind` diff --git a/project/plugins.sbt b/project/plugins.sbt index 
b6bc5f1184b6..aba843ca2c3c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,7 +2,11 @@ // // e.g. addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.1.0") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.10.1") +// some plugins haven't moved to scala-xml 2.x yet +libraryDependencySchemes += + "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always + +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.12.0") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.10") diff --git a/project/resources/referenceReplacements/_layouts/static-site-main.html b/project/resources/referenceReplacements/_layouts/static-site-main.html index a9114aa455ac..d50b86a0ba03 100644 --- a/project/resources/referenceReplacements/_layouts/static-site-main.html +++ b/project/resources/referenceReplacements/_layouts/static-site-main.html @@ -2,59 +2,25 @@ layout: main --- -
      - +
      + {{ content }} -